first commit
services/docstore/.gitignore (new file, 8 lines, vendored)
@@ -0,0 +1,8 @@
node_modules
forever

# managed by dev-environment$ bin/update_build_scripts
.npmrc

# Jetbrains IDEs
.idea
services/docstore/.mocharc.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "require": "test/setup.js"
}
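Note: the Mocha config above loads a shared setup file before any suite runs. The actual test/setup.js is not part of this excerpt; a minimal sketch of what such a file typically wires up for an Overleaf-style service (the chai plugins here are an assumption, not the committed file):

    // test/setup.js -- hypothetical sketch, not the file from this commit
    const chai = require('chai')
    const sinonChai = require('sinon-chai')
    const chaiAsPromised = require('chai-as-promised')

    // register assertion plugins once, before any suite is loaded
    chai.use(sinonChai)
    chai.use(chaiAsPromised)

    // expose the assertion entry point globally so suites can skip the require
    global.expect = chai.expect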
services/docstore/.nvmrc (new file, 1 line)
@@ -0,0 +1 @@
20.18.2
services/docstore/Dockerfile (new file, 27 lines)
@@ -0,0 +1,27 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/overleaf/internal/

FROM node:20.18.2 AS base

WORKDIR /overleaf/services/docstore

# Google Cloud Storage needs a writable $HOME/.config for resumable uploads
# (see https://googleapis.dev/nodejs/storage/latest/File.html#createWriteStream)
RUN mkdir /home/node/.config && chown node:node /home/node/.config

FROM base AS app

COPY package.json package-lock.json /overleaf/
COPY services/docstore/package.json /overleaf/services/docstore/
COPY libraries/ /overleaf/libraries/
COPY patches/ /overleaf/patches/

RUN cd /overleaf && npm ci --quiet

COPY services/docstore/ /overleaf/services/docstore/

FROM app
USER node

CMD ["node", "--expose-gc", "app.js"]
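Note: the final stage drops to the unprivileged node user and starts the app with --expose-gc, which makes V8's collector callable from JavaScript so the process can force a collection on demand (e.g. after memory-heavy work). What the flag enables, in a few lines:

    // global.gc is only defined when node runs with --expose-gc
    if (typeof global.gc === 'function') {
      global.gc() // force a full garbage collection
    }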
services/docstore/LICENSE (new file, 662 lines)
@@ -0,0 +1,662 @@

GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007

Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.

Preamble

The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.

The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.

When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.

A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.

The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.

An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.

The precise terms and conditions for copying, distribution and
modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU Affero General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based
on the Program.

To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.

A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

The Corresponding Source for a work in source code form is that
same work.

2. Basic Permissions.

All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.

3. Protecting Users' Legal Rights From Anti-Circumvention Law.

No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.

b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".

c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.

d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.

A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.

b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.

c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.

d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.

e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.

A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.

Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:

a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or

b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or

c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or

d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or

e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or

f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.

All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.

8. Termination.

You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).

However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.

Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.

9. Acceptance Not Required for Having Copies.

You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.

10. Automatic Licensing of Downstream Recipients.

Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.

An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.

You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.

11. Patents.

A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".

A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.

Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.

In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.

If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.

If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.

A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.

Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.

12. No Surrender of Others' Freedom.

If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

13. Remote Network Interaction; Use with the GNU General Public License.

Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.

Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.

14. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.

If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.

15. Disclaimer of Warranty.

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

16. Limitation of Liability.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

17. Interpretation of Sections 15 and 16.

If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.

You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<http://www.gnu.org/licenses/>.
services/docstore/Makefile (new file, 156 lines)
@@ -0,0 +1,156 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/overleaf/internal/

BUILD_NUMBER ?= local
BRANCH_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
PROJECT_NAME = docstore
BUILD_DIR_NAME = $(shell pwd | xargs basename | tr -cd '[a-zA-Z0-9_.\-]')

DOCKER_COMPOSE_FLAGS ?= -f docker-compose.yml
DOCKER_COMPOSE := BUILD_NUMBER=$(BUILD_NUMBER) \
	BRANCH_NAME=$(BRANCH_NAME) \
	PROJECT_NAME=$(PROJECT_NAME) \
	MOCHA_GREP=${MOCHA_GREP} \
	docker compose ${DOCKER_COMPOSE_FLAGS}

COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE ?= test_acceptance_$(BUILD_DIR_NAME)
DOCKER_COMPOSE_TEST_ACCEPTANCE = \
	COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_ACCEPTANCE) $(DOCKER_COMPOSE)

COMPOSE_PROJECT_NAME_TEST_UNIT ?= test_unit_$(BUILD_DIR_NAME)
DOCKER_COMPOSE_TEST_UNIT = \
	COMPOSE_PROJECT_NAME=$(COMPOSE_PROJECT_NAME_TEST_UNIT) $(DOCKER_COMPOSE)

clean:
	-docker rmi ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-docker rmi us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)
	-$(DOCKER_COMPOSE_TEST_UNIT) down --rmi local
	-$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down --rmi local

HERE=$(shell pwd)
MONOREPO=$(shell cd ../../ && pwd)
# Run the linting commands in the scope of the monorepo.
# Eslint and prettier (plus some configs) are on the root.
RUN_LINTING = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(HERE) node:20.18.2 npm run --silent

RUN_LINTING_CI = docker run --rm --volume $(MONOREPO)/.editorconfig:/overleaf/.editorconfig --volume $(MONOREPO)/.eslintignore:/overleaf/.eslintignore --volume $(MONOREPO)/.eslintrc:/overleaf/.eslintrc --volume $(MONOREPO)/.prettierignore:/overleaf/.prettierignore --volume $(MONOREPO)/.prettierrc:/overleaf/.prettierrc --volume $(MONOREPO)/tsconfig.backend.json:/overleaf/tsconfig.backend.json ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) npm run --silent

# Same but from the top of the monorepo
RUN_LINTING_MONOREPO = docker run --rm -v $(MONOREPO):$(MONOREPO) -w $(MONOREPO) node:20.18.2 npm run --silent

SHELLCHECK_OPTS = \
	--shell=bash \
	--external-sources
SHELLCHECK_COLOR := $(if $(CI),--color=never,--color)
SHELLCHECK_FILES := { git ls-files "*.sh" -z; git grep -Plz "\A\#\!.*bash"; } | sort -zu

shellcheck:
	@$(SHELLCHECK_FILES) | xargs -0 -r docker run --rm -v $(HERE):/mnt -w /mnt \
		koalaman/shellcheck:stable $(SHELLCHECK_OPTS) $(SHELLCHECK_COLOR)

shellcheck_fix:
	@$(SHELLCHECK_FILES) | while IFS= read -r -d '' file; do \
		diff=$$(docker run --rm -v $(HERE):/mnt -w /mnt koalaman/shellcheck:stable $(SHELLCHECK_OPTS) --format=diff "$$file" 2>/dev/null); \
		if [ -n "$$diff" ] && ! echo "$$diff" | patch -p1 >/dev/null 2>&1; then echo "\033[31m$$file\033[0m"; \
		elif [ -n "$$diff" ]; then echo "$$file"; \
		else echo "\033[2m$$file\033[0m"; fi \
	done

format:
	$(RUN_LINTING) format

format_ci:
	$(RUN_LINTING_CI) format

format_fix:
	$(RUN_LINTING) format:fix

lint:
	$(RUN_LINTING) lint

lint_ci:
	$(RUN_LINTING_CI) lint

lint_fix:
	$(RUN_LINTING) lint:fix

typecheck:
	$(RUN_LINTING) types:check

typecheck_ci:
	$(RUN_LINTING_CI) types:check

test: format lint typecheck shellcheck test_unit test_acceptance

test_unit:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) run --rm test_unit
	$(MAKE) test_unit_clean
endif

test_clean: test_unit_clean
test_unit_clean:
ifneq (,$(wildcard test/unit))
	$(DOCKER_COMPOSE_TEST_UNIT) down -v -t 0
endif

test_acceptance: test_acceptance_clean test_acceptance_pre_run test_acceptance_run
	$(MAKE) test_acceptance_clean

test_acceptance_debug: test_acceptance_clean test_acceptance_pre_run test_acceptance_run_debug
	$(MAKE) test_acceptance_clean

test_acceptance_run:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance
endif

test_acceptance_run_debug:
ifneq (,$(wildcard test/acceptance))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run -p 127.0.0.9:19999:19999 --rm test_acceptance npm run test:acceptance -- --inspect=0.0.0.0:19999 --inspect-brk
endif

test_clean: test_acceptance_clean
test_acceptance_clean:
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) down -v -t 0

test_acceptance_pre_run:
ifneq (,$(wildcard test/acceptance/js/scripts/pre-run))
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance test/acceptance/js/scripts/pre-run
endif

benchmarks:
	$(DOCKER_COMPOSE_TEST_ACCEPTANCE) run --rm test_acceptance npm run benchmarks

build:
	docker build \
		--pull \
		--build-arg BUILDKIT_INLINE_CACHE=1 \
		--tag ci/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER) \
		--tag us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \
		--cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):$(BRANCH_NAME) \
		--cache-from us-east1-docker.pkg.dev/overleaf-ops/ol-docker/$(PROJECT_NAME):main \
		--file Dockerfile \
		../..

tar:
	$(DOCKER_COMPOSE) up tar

publish:

	docker push $(DOCKER_REPO)/$(PROJECT_NAME):$(BRANCH_NAME)-$(BUILD_NUMBER)


.PHONY: clean \
	format format_fix \
	lint lint_fix \
	build_types typecheck \
	lint_ci format_ci typecheck_ci \
	shellcheck shellcheck_fix \
	test test_clean test_unit test_unit_clean \
	test_acceptance test_acceptance_debug test_acceptance_pre_run \
	test_acceptance_run test_acceptance_run_debug test_acceptance_clean \
	benchmarks \
	build tar publish \
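Note: every target above runs inside Docker Compose, so a host needs only docker and make. A typical local loop is make format lint for the static checks, then make test_unit (with MOCHA_GREP=<pattern> to filter suites) and make test_acceptance, which brings up its own compose project and tears it down again. The double declaration of test_clean is not a conflict: both lines only add prerequisites (neither carries a recipe), and GNU make merges prerequisite lists, so make test_clean triggers both test_unit_clean and test_acceptance_clean.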
services/docstore/README.md (new file, 11 lines)
@@ -0,0 +1,11 @@
overleaf/docstore
===================

A CRUD API for storing and updating text documents in projects

License
-------

The code in this repository is released under the GNU AFFERO GENERAL PUBLIC LICENSE, version 3. A copy can be found in the `LICENSE` file.

Copyright (c) Overleaf, 2014-2019.
services/docstore/app.js (new file, 132 lines)
@@ -0,0 +1,132 @@
// Metrics must be initialized before importing anything else
require('@overleaf/metrics/initialize')

const Events = require('node:events')
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger')
const express = require('express')
const bodyParser = require('body-parser')
const {
  celebrate: validate,
  Joi,
  errors: handleValidationErrors,
} = require('celebrate')
const { mongoClient } = require('./app/js/mongodb')
const Errors = require('./app/js/Errors')
const HttpController = require('./app/js/HttpController')

Events.setMaxListeners(20)

logger.initialize('docstore')
if (Metrics.event_loop != null) {
  Metrics.event_loop.monitor(logger)
}
Metrics.leaked_sockets.monitor(logger)
Metrics.open_sockets.monitor()

const app = express()

app.use(Metrics.http.monitor(logger))

Metrics.injectMetricsRoute(app)

app.param('project_id', function (req, res, next, projectId) {
  if (projectId?.match(/^[0-9a-f]{24}$/)) {
    next()
  } else {
    next(new Error('invalid project id'))
  }
})

app.param('doc_id', function (req, res, next, docId) {
  if (docId?.match(/^[0-9a-f]{24}$/)) {
    next()
  } else {
    next(new Error('invalid doc id'))
  }
})

app.get('/project/:project_id/doc-deleted', HttpController.getAllDeletedDocs)
app.get('/project/:project_id/doc', HttpController.getAllDocs)
app.get('/project/:project_id/ranges', HttpController.getAllRanges)
app.get('/project/:project_id/has-ranges', HttpController.projectHasRanges)
app.get('/project/:project_id/doc/:doc_id', HttpController.getDoc)
app.get('/project/:project_id/doc/:doc_id/deleted', HttpController.isDocDeleted)
app.get('/project/:project_id/doc/:doc_id/raw', HttpController.getRawDoc)
app.get('/project/:project_id/doc/:doc_id/peek', HttpController.peekDoc)
// Add 64kb overhead for the JSON encoding, and double the size to allow for ranges in the json payload
app.post(
  '/project/:project_id/doc/:doc_id',
  bodyParser.json({ limit: Settings.maxJsonRequestSize }),
  HttpController.updateDoc
)
app.patch(
  '/project/:project_id/doc/:doc_id',
  bodyParser.json(),
  validate({
    body: {
      deleted: Joi.boolean(),
      name: Joi.string().when('deleted', { is: true, then: Joi.required() }),
      deletedAt: Joi.date().when('deleted', { is: true, then: Joi.required() }),
    },
  }),
  HttpController.patchDoc
)
app.delete('/project/:project_id/doc/:doc_id', (req, res) => {
  res.status(500).send('DELETE-ing a doc is DEPRECATED. PATCH the doc instead.')
})

app.post('/project/:project_id/archive', HttpController.archiveAllDocs)
app.post('/project/:project_id/doc/:doc_id/archive', HttpController.archiveDoc)
app.post('/project/:project_id/unarchive', HttpController.unArchiveAllDocs)
app.post('/project/:project_id/destroy', HttpController.destroyProject)

app.get('/health_check', HttpController.healthCheck)

app.get('/status', (req, res) => res.send('docstore is alive'))

app.use(handleValidationErrors())
app.use(function (error, req, res, next) {
  if (error instanceof Errors.NotFoundError) {
    logger.warn({ req }, 'not found')
    res.sendStatus(404)
  } else if (error instanceof Errors.DocModifiedError) {
    logger.warn({ req }, 'conflict: doc modified')
    res.sendStatus(409)
  } else if (error instanceof Errors.DocVersionDecrementedError) {
    logger.warn({ req }, 'conflict: doc version decremented')
    res.sendStatus(409)
  } else {
    logger.error({ err: error, req }, 'request errored')
    res.status(500).send('Oops, something went wrong')
  }
})

const { port } = Settings.internal.docstore
const { host } = Settings.internal.docstore

if (!module.parent) {
  // Called directly
  mongoClient
    .connect()
    .then(() => {
      const server = app.listen(port, host, function (err) {
        if (err) {
          logger.fatal({ err }, `Cannot bind to ${host}:${port}. Exiting.`)
          process.exit(1)
        }
        logger.debug(`Docstore starting up, listening on ${host}:${port}`)
      })
      server.timeout = 120000
      server.keepAliveTimeout = 5000
      server.requestTimeout = 60000
      server.headersTimeout = 60000
    })
    .catch(err => {
      logger.fatal({ err }, 'Cannot connect to mongo. Exiting.')
      process.exit(1)
    })
}

module.exports = app
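Note: the routes above form a small REST surface keyed by 24-character hex ObjectIds; the two app.param validators reject anything else before a handler runs, and the error middleware maps NotFoundError to 404 and the two conflict errors to 409. A hedged sketch of a client call (the IDs and port are illustrative assumptions; the real host and port come from Settings.internal.docstore):

    // hypothetical client call against a local docstore instance
    const projectId = '5f0c9a7b2e4d6a0017c3b2a1' // must match /^[0-9a-f]{24}$/
    const docId = '5f0c9a7b2e4d6a0017c3b2a2'

    async function fetchDoc() {
      const res = await fetch(
        `http://localhost:3016/project/${projectId}/doc/${docId}`
      )
      if (res.status === 404) throw new Error('doc not found') // NotFoundError
      return res.json() // response body shape is defined by HttpController.getDoc
    }

    fetchDoc().then(doc => console.log(doc), console.error)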
227
services/docstore/app/js/DocArchiveManager.js
Normal file
227
services/docstore/app/js/DocArchiveManager.js
Normal file
@@ -0,0 +1,227 @@
|
||||
const { callbackify } = require('node:util')
|
||||
const MongoManager = require('./MongoManager').promises
|
||||
const Errors = require('./Errors')
|
||||
const logger = require('@overleaf/logger')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const crypto = require('node:crypto')
|
||||
const { ReadableString } = require('@overleaf/stream-utils')
|
||||
const RangeManager = require('./RangeManager')
|
||||
const PersistorManager = require('./PersistorManager')
|
||||
const pMap = require('p-map')
|
||||
const { streamToBuffer } = require('./StreamToBuffer').promises
|
||||
const { BSON } = require('mongodb-legacy')
|
||||
|
||||
const PARALLEL_JOBS = Settings.parallelArchiveJobs
|
||||
const UN_ARCHIVE_BATCH_SIZE = Settings.unArchiveBatchSize
|
||||
|
||||
module.exports = {
|
||||
archiveAllDocs: callbackify(archiveAllDocs),
|
||||
archiveDoc: callbackify(archiveDoc),
|
||||
unArchiveAllDocs: callbackify(unArchiveAllDocs),
|
||||
unarchiveDoc: callbackify(unarchiveDoc),
|
||||
destroyProject: callbackify(destroyProject),
|
||||
  getDoc: callbackify(getDoc),
  promises: {
    archiveAllDocs,
    archiveDoc,
    unArchiveAllDocs,
    unarchiveDoc,
    destroyProject,
    getDoc,
  },
}

async function archiveAllDocs(projectId) {
  if (!_isArchivingEnabled()) {
    return
  }

  const docIds = await MongoManager.getNonArchivedProjectDocIds(projectId)
  await pMap(docIds, docId => archiveDoc(projectId, docId), {
    concurrency: PARALLEL_JOBS,
  })
}

async function archiveDoc(projectId, docId) {
  if (!_isArchivingEnabled()) {
    return
  }

  const doc = await MongoManager.getDocForArchiving(projectId, docId)

  if (!doc) {
    // The doc wasn't found, it was already archived, or the lock couldn't be
    // acquired. Since we don't know which it is, silently return.
    return
  }

  logger.debug({ projectId, docId: doc._id }, 'sending doc to persistor')
  const key = `${projectId}/${doc._id}`

  if (doc.lines == null) {
    throw new Error('doc has no lines')
  }

  // warn about any oversized docs already in mongo
  const linesSize = BSON.calculateObjectSize(doc.lines || {})
  const rangesSize = BSON.calculateObjectSize(doc.ranges || {})
  if (
    linesSize > Settings.max_doc_length ||
    rangesSize > Settings.max_doc_length
  ) {
    logger.warn(
      { projectId, docId: doc._id, linesSize, rangesSize },
      'large doc found when archiving project'
    )
  }

  const json = JSON.stringify({
    lines: doc.lines,
    ranges: doc.ranges,
    rev: doc.rev,
    schema_v: 1,
  })

  // this should never happen, but protects against memory-corruption errors that
  // have happened in the past
  if (json.indexOf('\u0000') > -1) {
    const error = new Error('null bytes detected')
    logger.err({ err: error, doc }, error.message)
    throw error
  }

  const md5 = crypto.createHash('md5').update(json).digest('hex')
  const stream = new ReadableString(json)
  await PersistorManager.sendStream(Settings.docstore.bucket, key, stream, {
    sourceMd5: md5,
  })
  await MongoManager.markDocAsArchived(projectId, docId, doc.rev)
}

async function unArchiveAllDocs(projectId) {
  if (!_isArchivingEnabled()) {
    return
  }

  while (true) {
    let docs
    if (Settings.docstore.keepSoftDeletedDocsArchived) {
      docs = await MongoManager.getNonDeletedArchivedProjectDocs(
        projectId,
        UN_ARCHIVE_BATCH_SIZE
      )
    } else {
      docs = await MongoManager.getArchivedProjectDocs(
        projectId,
        UN_ARCHIVE_BATCH_SIZE
      )
    }
    if (!docs || docs.length === 0) {
      break
    }
    await pMap(docs, doc => unarchiveDoc(projectId, doc._id), {
      concurrency: PARALLEL_JOBS,
    })
  }
}

// get the doc from the PersistorManager without storing it in mongo
async function getDoc(projectId, docId) {
  const key = `${projectId}/${docId}`
  const sourceMd5 = await PersistorManager.getObjectMd5Hash(
    Settings.docstore.bucket,
    key
  )
  const stream = await PersistorManager.getObjectStream(
    Settings.docstore.bucket,
    key
  )
  stream.resume()
  const buffer = await streamToBuffer(projectId, docId, stream)
  const md5 = crypto.createHash('md5').update(buffer).digest('hex')
  if (sourceMd5 !== md5) {
    throw new Errors.Md5MismatchError('md5 mismatch when downloading doc', {
      key,
      sourceMd5,
      md5,
    })
  }

  return _deserializeArchivedDoc(buffer)
}

// get the doc and unarchive it to mongo
async function unarchiveDoc(projectId, docId) {
  logger.debug({ projectId, docId }, 'getting doc from persistor')
  const mongoDoc = await MongoManager.findDoc(projectId, docId, {
    inS3: 1,
    rev: 1,
  })
  if (!mongoDoc.inS3) {
    // The doc is already unarchived
    return
  }

  if (!_isArchivingEnabled()) {
    throw new Error(
      'found archived doc, but archiving backend is not configured'
    )
  }

  const archivedDoc = await getDoc(projectId, docId)
  if (archivedDoc.rev == null) {
    // Older archived docs didn't have a rev. Assume that the rev of the
    // archived doc is the rev that was stored in Mongo when we retrieved it
    // earlier.
    archivedDoc.rev = mongoDoc.rev
  }
  await MongoManager.restoreArchivedDoc(projectId, docId, archivedDoc)
}

async function destroyProject(projectId) {
  const tasks = [MongoManager.destroyProject(projectId)]
  if (_isArchivingEnabled()) {
    tasks.push(
      PersistorManager.deleteDirectory(Settings.docstore.bucket, projectId)
    )
  }
  await Promise.all(tasks)
}

function _deserializeArchivedDoc(buffer) {
  const doc = JSON.parse(buffer)

  const result = {}
  if (doc.schema_v === 1 && doc.lines != null) {
    result.lines = doc.lines
    if (doc.ranges != null) {
      result.ranges = RangeManager.jsonRangesToMongo(doc.ranges)
    }
  } else if (Array.isArray(doc)) {
    result.lines = doc
  } else {
    throw new Error("I don't understand the doc format in s3")
  }

  if (doc.rev != null) {
    result.rev = doc.rev
  }

  return result
}

function _isArchivingEnabled() {
  const backend = Settings.docstore.backend

  if (!backend) {
    return false
  }

  // The default backend is S3. If another backend is configured or the S3
  // backend itself is correctly configured, then archiving is enabled.
  if (backend === 's3' && Settings.docstore.s3 == null) {
    return false
  }

  return true
}
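For orientation, a minimal sketch of how this module's promise interface might be consumed, assuming a valid projectId string and a configured archiving backend; the wrapper function is illustrative and not part of the commit:

const DocArchiveManager = require('./DocArchiveManager')

async function flushProjectToObjectStore(projectId) {
  // archive every doc that is still only in mongo ...
  await DocArchiveManager.promises.archiveAllDocs(projectId)
  // ... then pull them all back into mongo in batches
  await DocArchiveManager.promises.unArchiveAllDocs(projectId)
}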
297
services/docstore/app/js/DocManager.js
Normal file
@@ -0,0 +1,297 @@
const MongoManager = require('./MongoManager')
const Errors = require('./Errors')
const logger = require('@overleaf/logger')
const _ = require('lodash')
const DocArchive = require('./DocArchiveManager')
const RangeManager = require('./RangeManager')
const Settings = require('@overleaf/settings')
const { callbackifyAll } = require('@overleaf/promise-utils')
const { setTimeout } = require('node:timers/promises')

/**
 * @import { Document } from 'mongodb'
 * @import { WithId } from 'mongodb'
 */

const DocManager = {
  /**
   * @param {string} projectId
   * @param {string} docId
   * @param {{inS3: boolean}} filter
   * @returns {Promise<WithId<Document>>}
   * @private
   */
  async _getDoc(projectId, docId, filter) {
    if (filter == null) {
      filter = {}
    }
    if (filter.inS3 !== true) {
      throw new Error('must include inS3 when getting doc')
    }

    const doc = await MongoManager.promises.findDoc(projectId, docId, filter)

    if (doc == null) {
      throw new Errors.NotFoundError(
        `No such doc: ${docId} in project ${projectId}`
      )
    }

    if (doc.inS3) {
      await DocArchive.promises.unarchiveDoc(projectId, docId)
      return await DocManager._getDoc(projectId, docId, filter)
    }

    return doc
  },

  async isDocDeleted(projectId, docId) {
    const doc = await MongoManager.promises.findDoc(projectId, docId, {
      deleted: true,
    })

    if (!doc) {
      throw new Errors.NotFoundError(
        `No such project/doc: ${projectId}/${docId}`
      )
    }

    // `doc.deleted` is `undefined` for non deleted docs
    return Boolean(doc.deleted)
  },

  async getFullDoc(projectId, docId) {
    const doc = await DocManager._getDoc(projectId, docId, {
      lines: true,
      rev: true,
      deleted: true,
      version: true,
      ranges: true,
      inS3: true,
    })
    return doc
  },

  // returns the doc without any version information
  async _peekRawDoc(projectId, docId) {
    const doc = await MongoManager.promises.findDoc(projectId, docId, {
      lines: true,
      rev: true,
      deleted: true,
      version: true,
      ranges: true,
      inS3: true,
    })

    if (doc == null) {
      throw new Errors.NotFoundError(
        `No such doc: ${docId} in project ${projectId}`
      )
    }

    if (doc.inS3) {
      // skip the unarchiving to mongo when getting a doc
      const archivedDoc = await DocArchive.promises.getDoc(projectId, docId)
      Object.assign(doc, archivedDoc)
    }

    return doc
  },

  // get the doc from mongo if possible, or from the persistent store otherwise,
  // without unarchiving it (avoids unnecessary writes to mongo)
  async peekDoc(projectId, docId) {
    const doc = await DocManager._peekRawDoc(projectId, docId)
    await MongoManager.promises.checkRevUnchanged(doc)
    return doc
  },

  async getDocLines(projectId, docId) {
    const doc = await DocManager._getDoc(projectId, docId, {
      lines: true,
      inS3: true,
    })
    return doc
  },

  async getAllDeletedDocs(projectId, filter) {
    return await MongoManager.promises.getProjectsDeletedDocs(projectId, filter)
  },

  async getAllNonDeletedDocs(projectId, filter) {
    await DocArchive.promises.unArchiveAllDocs(projectId)
    const docs = await MongoManager.promises.getProjectsDocs(
      projectId,
      { include_deleted: false },
      filter
    )
    if (docs == null) {
      throw new Errors.NotFoundError(`No docs for project ${projectId}`)
    }
    return docs
  },

  async projectHasRanges(projectId) {
    const docs = await MongoManager.promises.getProjectsDocs(
      projectId,
      {},
      { _id: 1 }
    )
    const docIds = docs.map(doc => doc._id)
    for (const docId of docIds) {
      const doc = await DocManager.peekDoc(projectId, docId)
      if (
        (doc.ranges?.comments != null && doc.ranges.comments.length > 0) ||
        (doc.ranges?.changes != null && doc.ranges.changes.length > 0)
      ) {
        return true
      }
    }
    return false
  },

  async updateDoc(projectId, docId, lines, version, ranges) {
    const MAX_ATTEMPTS = 2
    for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
      try {
        const { modified, rev } = await DocManager._tryUpdateDoc(
          projectId,
          docId,
          lines,
          version,
          ranges
        )
        return { modified, rev }
      } catch (err) {
        if (err instanceof Errors.DocRevValueError && attempt < MAX_ATTEMPTS) {
          // Another updateDoc call was racing with ours.
          // Retry once in a bit.
          logger.warn(
            { projectId, docId, err },
            'detected concurrent updateDoc call'
          )
          await setTimeout(100 + Math.random() * 100)
          continue
        } else {
          throw err
        }
      }
    }
  },

  async _tryUpdateDoc(projectId, docId, lines, version, ranges) {
    if (lines == null || version == null || ranges == null) {
      throw new Error('no lines, version or ranges provided')
    }

    let doc
    try {
      doc = await DocManager._getDoc(projectId, docId, {
        version: true,
        rev: true,
        lines: true,
        ranges: true,
        inS3: true,
      })
    } catch (err) {
      if (err instanceof Errors.NotFoundError) {
        doc = null
      } else {
        throw err
      }
    }

    ranges = RangeManager.jsonRangesToMongo(ranges)

    let updateLines, updateRanges, updateVersion
    if (doc == null) {
      // If the document doesn't exist, we'll make sure to create/update all parts of it.
      updateLines = true
      updateVersion = true
      updateRanges = true
    } else {
      if (doc.version > version) {
        // Reject update when the version was decremented.
        // Potential reasons: racing flush, broken history.
        throw new Errors.DocVersionDecrementedError('rejecting stale update', {
          updateVersion: version,
          flushedVersion: doc.version,
        })
      }
      updateLines = !_.isEqual(doc.lines, lines)
      updateVersion = doc.version !== version
      updateRanges = RangeManager.shouldUpdateRanges(doc.ranges, ranges)
    }

    let modified = false
    let rev = doc?.rev || 0

    if (updateLines || updateRanges || updateVersion) {
      const update = {}
      if (updateLines) {
        update.lines = lines
      }
      if (updateRanges) {
        update.ranges = ranges
      }
      if (updateVersion) {
        update.version = version
      }
      logger.debug(
        { projectId, docId, oldVersion: doc?.version, newVersion: version },
        'updating doc'
      )

      if (updateLines || updateRanges) {
        rev += 1 // rev will be incremented in mongo by MongoManager.upsertIntoDocCollection
      }

      modified = true
      await MongoManager.promises.upsertIntoDocCollection(
        projectId,
        docId,
        doc?.rev,
        update
      )
    } else {
      logger.debug({ projectId, docId }, 'doc has not changed - not updating')
    }

    return { modified, rev }
  },

  async patchDoc(projectId, docId, meta) {
    const projection = { _id: 1, deleted: true }
    const doc = await MongoManager.promises.findDoc(
      projectId,
      docId,
      projection
    )
    if (!doc) {
      throw new Errors.NotFoundError(
        `No such project/doc to delete: ${projectId}/${docId}`
      )
    }

    if (meta.deleted && Settings.docstore.archiveOnSoftDelete) {
      // The user will not read this doc anytime soon. Flush it out of mongo.
      DocArchive.promises.archiveDoc(projectId, docId).catch(err => {
        logger.warn(
          { projectId, docId, err },
          'archiving a single doc in the background failed'
        )
      })
    }

    await MongoManager.promises.patchDoc(projectId, docId, meta)
  },
}

module.exports = {
  ...callbackifyAll(DocManager, {
    multiResult: {
      updateDoc: ['modified', 'rev'],
    },
  }),
  promises: DocManager,
}
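A minimal sketch of calling updateDoc through both interfaces; projectId and docId are placeholders, and the multiResult option above is what maps the promise's { modified, rev } object onto positional callback arguments:

const DocManager = require('./DocManager')

async function example(projectId, docId) {
  // promise interface: resolves with { modified, rev }
  const { modified, rev } = await DocManager.promises.updateDoc(
    projectId,
    docId,
    ['hello', 'world'],
    1,
    {}
  )
  console.log(modified, rev)

  // callback interface produced by callbackifyAll
  DocManager.updateDoc(
    projectId,
    docId,
    ['hello', 'world'],
    2,
    {},
    (err, wasModified, newRev) => {
      // handle err / wasModified / newRev here
    }
  )
}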
19
services/docstore/app/js/Errors.js
Normal file
@@ -0,0 +1,19 @@
// import Errors from object-persistor to pass instanceof checks
const OError = require('@overleaf/o-error')
const { Errors } = require('@overleaf/object-persistor')

class Md5MismatchError extends OError {}

class DocModifiedError extends OError {}

class DocRevValueError extends OError {}

class DocVersionDecrementedError extends OError {}

module.exports = {
  Md5MismatchError,
  DocModifiedError,
  DocRevValueError,
  DocVersionDecrementedError,
  ...Errors,
}
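The spread of the object-persistor Errors is what lets one instanceof check cover both local and persistor failures; a sketch with a hypothetical call site:

const Errors = require('./Errors')
const DocManager = require('./DocManager')

async function fetchOrNull(projectId, docId) {
  try {
    return await DocManager.promises.getFullDoc(projectId, docId)
  } catch (err) {
    if (err instanceof Errors.NotFoundError) {
      // same class the persistor throws, so not-found errors raised while
      // fetching archived docs from the bucket land here too
      return null
    }
    throw err
  }
}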
67
services/docstore/app/js/HealthChecker.js
Normal file
@@ -0,0 +1,67 @@
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const { db, ObjectId } = require('./mongodb')
const request = require('request')
const async = require('async')
const _ = require('lodash')
const crypto = require('node:crypto')
const settings = require('@overleaf/settings')
const { port } = settings.internal.docstore
const logger = require('@overleaf/logger')

module.exports = {
  check(callback) {
    const docId = new ObjectId()
    const projectId = new ObjectId(settings.docstore.healthCheck.project_id)
    const url = `http://127.0.0.1:${port}/project/${projectId}/doc/${docId}`
    const lines = [
      'smoke test - delete me',
      `${crypto.randomBytes(32).toString('hex')}`,
    ]
    const getOpts = () => ({
      url,
      timeout: 3000,
    })
    logger.debug({ lines, url, docId, projectId }, 'running health check')
    const jobs = [
      function (cb) {
        const opts = getOpts()
        opts.json = { lines, version: 42, ranges: {} }
        return request.post(opts, cb)
      },
      function (cb) {
        const opts = getOpts()
        opts.json = true
        return request.get(opts, function (err, res, body) {
          if (err != null) {
            logger.err({ err }, 'docstore returned an error in health check get')
            return cb(err)
          } else if (res == null) {
            return cb(new Error('no response from docstore with get check'))
          } else if ((res != null ? res.statusCode : undefined) !== 200) {
            return cb(new Error(`status code not 200, it's ${res.statusCode}`))
          } else if (
            _.isEqual(body != null ? body.lines : undefined, lines) &&
            (body != null ? body._id : undefined) === docId.toString()
          ) {
            return cb()
          } else {
            return cb(
              new Error(
                `health check lines not equal ${body.lines} != ${lines}`
              )
            )
          }
        })
      },
      cb => db.docs.deleteOne({ _id: docId, project_id: projectId }, cb),
    ]
    return async.series(jobs, callback)
  },
}
319
services/docstore/app/js/HttpController.js
Normal file
@@ -0,0 +1,319 @@
const DocManager = require('./DocManager')
const logger = require('@overleaf/logger')
const DocArchive = require('./DocArchiveManager')
const HealthChecker = require('./HealthChecker')
const Errors = require('./Errors')
const Settings = require('@overleaf/settings')

function getDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  const includeDeleted = req.query.include_deleted === 'true'
  logger.debug({ projectId, docId }, 'getting doc')
  DocManager.getFullDoc(projectId, docId, function (error, doc) {
    if (error) {
      return next(error)
    }
    logger.debug({ docId, projectId }, 'got doc')
    if (doc == null) {
      res.sendStatus(404)
    } else if (doc.deleted && !includeDeleted) {
      res.sendStatus(404)
    } else {
      res.json(_buildDocView(doc))
    }
  })
}

function peekDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  logger.debug({ projectId, docId }, 'peeking doc')
  DocManager.peekDoc(projectId, docId, function (error, doc) {
    if (error) {
      return next(error)
    }
    if (doc == null) {
      res.sendStatus(404)
    } else {
      res.setHeader('x-doc-status', doc.inS3 ? 'archived' : 'active')
      res.json(_buildDocView(doc))
    }
  })
}

function isDocDeleted(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  DocManager.isDocDeleted(projectId, docId, function (error, deleted) {
    if (error) {
      return next(error)
    }
    res.json({ deleted })
  })
}

function getRawDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  logger.debug({ projectId, docId }, 'getting raw doc')
  DocManager.getDocLines(projectId, docId, function (error, doc) {
    if (error) {
      return next(error)
    }
    if (doc == null) {
      res.sendStatus(404)
    } else {
      res.setHeader('content-type', 'text/plain')
      res.send(_buildRawDocView(doc))
    }
  })
}

function getAllDocs(req, res, next) {
  const { project_id: projectId } = req.params
  logger.debug({ projectId }, 'getting all docs')
  DocManager.getAllNonDeletedDocs(
    projectId,
    { lines: true, rev: true },
    function (error, docs) {
      if (docs == null) {
        docs = []
      }
      if (error) {
        return next(error)
      }
      const docViews = _buildDocsArrayView(projectId, docs)
      for (const docView of docViews) {
        if (!docView.lines) {
          logger.warn({ projectId, docId: docView._id }, 'missing doc lines')
          docView.lines = []
        }
      }
      res.json(docViews)
    }
  )
}

function getAllDeletedDocs(req, res, next) {
  const { project_id: projectId } = req.params
  logger.debug({ projectId }, 'getting all deleted docs')
  DocManager.getAllDeletedDocs(
    projectId,
    { name: true, deletedAt: true },
    function (error, docs) {
      if (error) {
        return next(error)
      }
      res.json(
        docs.map(doc => ({
          _id: doc._id.toString(),
          name: doc.name,
          deletedAt: doc.deletedAt,
        }))
      )
    }
  )
}

function getAllRanges(req, res, next) {
  const { project_id: projectId } = req.params
  logger.debug({ projectId }, 'getting all ranges')
  DocManager.getAllNonDeletedDocs(
    projectId,
    { ranges: true },
    function (error, docs) {
      if (docs == null) {
        docs = []
      }
      if (error) {
        return next(error)
      }
      res.json(_buildDocsArrayView(projectId, docs))
    }
  )
}

function projectHasRanges(req, res, next) {
  const { project_id: projectId } = req.params
  DocManager.projectHasRanges(projectId, (err, projectHasRanges) => {
    if (err) {
      return next(err)
    }
    res.json({ projectHasRanges })
  })
}

function updateDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  const lines = req.body?.lines
  const version = req.body?.version
  const ranges = req.body?.ranges

  if (lines == null || !(lines instanceof Array)) {
    logger.error({ projectId, docId }, 'no doc lines provided')
    res.sendStatus(400) // Bad Request
    return
  }

  if (version == null || typeof version !== 'number') {
    logger.error({ projectId, docId }, 'no doc version provided')
    res.sendStatus(400) // Bad Request
    return
  }

  if (ranges == null) {
    logger.error({ projectId, docId }, 'no doc ranges provided')
    res.sendStatus(400) // Bad Request
    return
  }

  const bodyLength = lines.reduce((len, line) => line.length + len, 0)
  if (bodyLength > Settings.max_doc_length) {
    logger.error({ projectId, docId, bodyLength }, 'document body too large')
    res.status(413).send('document body too large')
    return
  }

  logger.debug({ projectId, docId }, 'got http request to update doc')
  DocManager.updateDoc(
    projectId,
    docId,
    lines,
    version,
    ranges,
    function (error, modified, rev) {
      if (error) {
        return next(error)
      }
      res.json({
        modified,
        rev,
      })
    }
  )
}

function patchDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  logger.debug({ projectId, docId }, 'patching doc')

  const allowedFields = ['deleted', 'deletedAt', 'name']
  const meta = {}
  Object.entries(req.body).forEach(([field, value]) => {
    if (allowedFields.includes(field)) {
      meta[field] = value
    } else {
      logger.fatal({ field }, 'joi validation for patchDoc is broken')
    }
  })
  DocManager.patchDoc(projectId, docId, meta, function (error) {
    if (error) {
      return next(error)
    }
    res.sendStatus(204)
  })
}

function _buildDocView(doc) {
  const docView = { _id: doc._id?.toString() }
  for (const attribute of ['lines', 'rev', 'version', 'ranges', 'deleted']) {
    if (doc[attribute] != null) {
      docView[attribute] = doc[attribute]
    }
  }
  return docView
}

function _buildRawDocView(doc) {
  return (doc?.lines ?? []).join('\n')
}

function _buildDocsArrayView(projectId, docs) {
  const docViews = []
  for (const doc of docs) {
    if (doc != null) {
      // There can end up being null docs for some reason :( (probably a race condition)
      docViews.push(_buildDocView(doc))
    } else {
      logger.error(
        { err: new Error('null doc'), projectId },
        'encountered null doc'
      )
    }
  }
  return docViews
}

function archiveAllDocs(req, res, next) {
  const { project_id: projectId } = req.params
  logger.debug({ projectId }, 'archiving all docs')
  DocArchive.archiveAllDocs(projectId, function (error) {
    if (error) {
      return next(error)
    }
    res.sendStatus(204)
  })
}

function archiveDoc(req, res, next) {
  const { doc_id: docId, project_id: projectId } = req.params
  logger.debug({ projectId, docId }, 'archiving a doc')
  DocArchive.archiveDoc(projectId, docId, function (error) {
    if (error) {
      return next(error)
    }
    res.sendStatus(204)
  })
}

function unArchiveAllDocs(req, res, next) {
  const { project_id: projectId } = req.params
  logger.debug({ projectId }, 'unarchiving all docs')
  DocArchive.unArchiveAllDocs(projectId, function (err) {
    if (err) {
      if (err instanceof Errors.DocRevValueError) {
        logger.warn({ err }, 'Failed to unarchive doc')
        return res.sendStatus(409)
      }
      return next(err)
    }
    res.sendStatus(200)
  })
}

function destroyProject(req, res, next) {
  const { project_id: projectId } = req.params
  logger.debug({ projectId }, 'destroying all docs')
  DocArchive.destroyProject(projectId, function (error) {
    if (error) {
      return next(error)
    }
    res.sendStatus(204)
  })
}

function healthCheck(req, res) {
  HealthChecker.check(function (err) {
    if (err) {
      logger.err({ err }, 'error performing health check')
      res.sendStatus(500)
    } else {
      res.sendStatus(200)
    }
  })
}

module.exports = {
  getDoc,
  peekDoc,
  isDocDeleted,
  getRawDoc,
  getAllDocs,
  getAllDeletedDocs,
  getAllRanges,
  projectHasRanges,
  updateDoc,
  patchDoc,
  archiveAllDocs,
  archiveDoc,
  unArchiveAllDocs,
  destroyProject,
  healthCheck,
}
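The route wiring lives in app.js, which is not part of this excerpt; judging from the URL that HealthChecker builds and POSTs to, updateDoc is mounted at POST /project/:project_id/doc/:doc_id. A hedged sketch of exercising it with Node's built-in fetch, using the default port 3016 from config/settings.defaults.js (the route path is an assumption inferred from HealthChecker):

async function smokeUpdate(projectId, docId) {
  const res = await fetch(
    `http://127.0.0.1:3016/project/${projectId}/doc/${docId}`,
    {
      method: 'POST',
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify({ lines: ['hello'], version: 42, ranges: {} }),
    }
  )
  // updateDoc responds with { modified, rev } on success
  return await res.json()
}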
274
services/docstore/app/js/MongoManager.js
Normal file
@@ -0,0 +1,274 @@
const { db, ObjectId } = require('./mongodb')
const Settings = require('@overleaf/settings')
const Errors = require('./Errors')
const { callbackify } = require('node:util')

const ARCHIVING_LOCK_DURATION_MS = Settings.archivingLockDurationMs

async function findDoc(projectId, docId, projection) {
  const doc = await db.docs.findOne(
    {
      _id: new ObjectId(docId.toString()),
      project_id: new ObjectId(projectId.toString()),
    },
    { projection }
  )
  if (doc && projection.version && !doc.version) {
    doc.version = 0
  }
  return doc
}

async function getProjectsDeletedDocs(projectId, projection) {
  const docs = await db.docs
    .find(
      {
        project_id: new ObjectId(projectId.toString()),
        deleted: true,
      },
      {
        projection,
        sort: { deletedAt: -1 },
        limit: Settings.max_deleted_docs,
      }
    )
    .toArray()
  return docs
}

async function getProjectsDocs(projectId, options, projection) {
  const query = { project_id: new ObjectId(projectId.toString()) }
  if (!options.include_deleted) {
    query.deleted = { $ne: true }
  }
  const queryOptions = {
    projection,
  }
  if (options.limit) {
    queryOptions.limit = options.limit
  }
  const docs = await db.docs.find(query, queryOptions).toArray()
  return docs
}

async function getArchivedProjectDocs(projectId, maxResults) {
  const query = {
    project_id: new ObjectId(projectId.toString()),
    inS3: true,
  }
  const docs = await db.docs
    .find(query, { projection: { _id: 1 }, limit: maxResults })
    .toArray()
  return docs
}

async function getNonArchivedProjectDocIds(projectId) {
  const docs = await db.docs
    .find(
      {
        project_id: new ObjectId(projectId),
        inS3: { $ne: true },
      },
      { projection: { _id: 1 } }
    )
    .map(doc => doc._id)
    .toArray()
  return docs
}

async function getNonDeletedArchivedProjectDocs(projectId, maxResults) {
  const query = {
    project_id: new ObjectId(projectId.toString()),
    deleted: { $ne: true },
    inS3: true,
  }
  const docs = await db.docs
    .find(query, { projection: { _id: 1 }, limit: maxResults })
    .toArray()
  return docs
}

async function upsertIntoDocCollection(projectId, docId, previousRev, updates) {
  if (previousRev) {
    const update = {
      $set: updates,
      $unset: { inS3: true },
    }
    if (updates.lines || updates.ranges) {
      update.$inc = { rev: 1 }
    }
    const result = await db.docs.updateOne(
      {
        _id: new ObjectId(docId),
        project_id: new ObjectId(projectId),
        rev: previousRev,
      },
      update
    )
    if (result.matchedCount !== 1) {
      throw new Errors.DocRevValueError()
    }
  } else {
    try {
      await db.docs.insertOne({
        _id: new ObjectId(docId),
        project_id: new ObjectId(projectId),
        rev: 1,
        ...updates,
      })
    } catch (err) {
      if (err.code === 11000) {
        // duplicate doc _id
        throw new Errors.DocRevValueError()
      } else {
        throw err
      }
    }
  }
}

async function patchDoc(projectId, docId, meta) {
  await db.docs.updateOne(
    {
      _id: new ObjectId(docId),
      project_id: new ObjectId(projectId),
    },
    { $set: meta }
  )
}

/**
 * Fetch a doc and lock it for archiving
 *
 * This will return null if the doc is not found, if it's already archived or
 * if the lock can't be acquired.
 */
async function getDocForArchiving(projectId, docId) {
  const archivingUntil = new Date(Date.now() + ARCHIVING_LOCK_DURATION_MS)
  const result = await db.docs.findOneAndUpdate(
    {
      _id: new ObjectId(docId),
      project_id: new ObjectId(projectId),
      inS3: { $ne: true },
      $or: [{ archivingUntil: null }, { archivingUntil: { $lt: new Date() } }],
    },
    { $set: { archivingUntil } },
    {
      projection: { lines: 1, ranges: 1, rev: 1 },
      includeResultMetadata: true,
    }
  )
  return result.value
}

/**
 * Clear the doc contents from Mongo and release the archiving lock
 */
async function markDocAsArchived(projectId, docId, rev) {
  await db.docs.updateOne(
    { _id: new ObjectId(docId), rev },
    {
      $set: { inS3: true },
      $unset: { lines: 1, ranges: 1, archivingUntil: 1 },
    }
  )
}

/**
 * Restore an archived doc
 *
 * This checks that the archived doc's rev matches.
 */
async function restoreArchivedDoc(projectId, docId, archivedDoc) {
  const query = {
    _id: new ObjectId(docId),
    project_id: new ObjectId(projectId),
    rev: archivedDoc.rev,
  }
  const update = {
    $set: {
      lines: archivedDoc.lines,
      ranges: archivedDoc.ranges || {},
    },
    $unset: {
      inS3: true,
    },
  }
  const result = await db.docs.updateOne(query, update)

  if (result.matchedCount === 0) {
    throw new Errors.DocRevValueError('failed to unarchive doc', {
      docId,
      rev: archivedDoc.rev,
    })
  }
}

async function getDocRev(docId) {
  const doc = await db.docs.findOne(
    { _id: new ObjectId(docId.toString()) },
    { projection: { rev: 1 } }
  )
  return doc && doc.rev
}

/**
 * Helper method to support optimistic locking.
 *
 * Check that the rev of an existing doc is unchanged. If the rev has
 * changed, throw a DocModifiedError.
 */
async function checkRevUnchanged(doc) {
  const currentRev = await getDocRev(doc._id)
  if (isNaN(currentRev) || isNaN(doc.rev)) {
    throw new Errors.DocRevValueError('doc rev is NaN', {
      doc_id: doc._id,
      rev: doc.rev,
      currentRev,
    })
  }
  if (doc.rev !== currentRev) {
    throw new Errors.DocModifiedError('doc rev has changed', {
      doc_id: doc._id,
      rev: doc.rev,
      currentRev,
    })
  }
}

async function destroyProject(projectId) {
  await db.docs.deleteMany({ project_id: new ObjectId(projectId) })
}

module.exports = {
  findDoc: callbackify(findDoc),
  getProjectsDeletedDocs: callbackify(getProjectsDeletedDocs),
  getProjectsDocs: callbackify(getProjectsDocs),
  getArchivedProjectDocs: callbackify(getArchivedProjectDocs),
  getNonArchivedProjectDocIds: callbackify(getNonArchivedProjectDocIds),
  getNonDeletedArchivedProjectDocs: callbackify(
    getNonDeletedArchivedProjectDocs
  ),
  upsertIntoDocCollection: callbackify(upsertIntoDocCollection),
  restoreArchivedDoc: callbackify(restoreArchivedDoc),
  patchDoc: callbackify(patchDoc),
  getDocForArchiving: callbackify(getDocForArchiving),
  markDocAsArchived: callbackify(markDocAsArchived),
  checkRevUnchanged: callbackify(checkRevUnchanged),
  destroyProject: callbackify(destroyProject),
  promises: {
    findDoc,
    getProjectsDeletedDocs,
    getProjectsDocs,
    getArchivedProjectDocs,
    getNonArchivedProjectDocIds,
    getNonDeletedArchivedProjectDocs,
    upsertIntoDocCollection,
    restoreArchivedDoc,
    patchDoc,
    getDocForArchiving,
    markDocAsArchived,
    checkRevUnchanged,
    destroyProject,
  },
}
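A minimal sketch of the optimistic-locking pattern upsertIntoDocCollection supports: read the current rev, then pass it back as previousRev so a concurrent writer surfaces as a DocRevValueError instead of a lost update (bumpLines and newLines are illustrative names):

const MongoManager = require('./MongoManager')

async function bumpLines(projectId, docId, newLines) {
  const doc = await MongoManager.promises.findDoc(projectId, docId, {
    rev: 1,
  })
  // the update below only matches if rev is still doc.rev in mongo;
  // otherwise upsertIntoDocCollection throws DocRevValueError
  await MongoManager.promises.upsertIntoDocCollection(
    projectId,
    docId,
    doc.rev,
    { lines: newLines }
  )
}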
12
services/docstore/app/js/PersistorManager.js
Normal file
@@ -0,0 +1,12 @@
const settings = require('@overleaf/settings')

const persistorSettings = settings.docstore
persistorSettings.Metrics = require('@overleaf/metrics')

const ObjectPersistor = require('@overleaf/object-persistor')
const AbstractPersistor = require('@overleaf/object-persistor/src/AbstractPersistor')
const persistor = settings.docstore.backend
  ? ObjectPersistor(persistorSettings)
  : new AbstractPersistor()

module.exports = persistor
68
services/docstore/app/js/RangeManager.js
Normal file
@@ -0,0 +1,68 @@
/* eslint-disable
    no-return-assign,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS101: Remove unnecessary use of Array.from
 * DS102: Remove unnecessary code created because of implicit returns
 * DS207: Consider shorter variations of null checks
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
let RangeManager
const _ = require('lodash')
const { ObjectId } = require('./mongodb')

module.exports = RangeManager = {
  shouldUpdateRanges(docRanges, incomingRanges) {
    if (incomingRanges == null) {
      throw new Error('expected incoming_ranges')
    }

    // If the ranges are empty, we don't store them in the DB, so set
    // doc_ranges to an empty object as default, since this is what the
    // incoming_ranges will be for an empty range set.
    if (docRanges == null) {
      docRanges = {}
    }

    return !_.isEqual(docRanges, incomingRanges)
  },

  jsonRangesToMongo(ranges) {
    if (ranges == null) {
      return null
    }

    const updateMetadata = function (metadata) {
      if ((metadata != null ? metadata.ts : undefined) != null) {
        metadata.ts = new Date(metadata.ts)
      }
      if ((metadata != null ? metadata.user_id : undefined) != null) {
        return (metadata.user_id = RangeManager._safeObjectId(metadata.user_id))
      }
    }

    for (const change of Array.from(ranges.changes || [])) {
      change.id = RangeManager._safeObjectId(change.id)
      updateMetadata(change.metadata)
    }
    for (const comment of Array.from(ranges.comments || [])) {
      comment.id = RangeManager._safeObjectId(comment.id)
      if ((comment.op != null ? comment.op.t : undefined) != null) {
        comment.op.t = RangeManager._safeObjectId(comment.op.t)
      }
      updateMetadata(comment.metadata)
    }
    return ranges
  },

  _safeObjectId(data) {
    try {
      return new ObjectId(data)
    } catch (error) {
      return data
    }
  },
}
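A small sketch of what jsonRangesToMongo does to incoming JSON. The hex ids below are made up, but they must be valid 24-character ObjectId strings, since _safeObjectId silently returns anything it cannot convert:

const RangeManager = require('./RangeManager')

const ranges = RangeManager.jsonRangesToMongo({
  changes: [
    {
      id: '5f0c9e1b2a3b4c5d6e7f8091',
      metadata: {
        user_id: '5f0c9e1b2a3b4c5d6e7f8092',
        ts: '2024-01-01T00:00:00.000Z',
      },
    },
  ],
  comments: [],
})
// ranges.changes[0].id is now an ObjectId instance,
// and ranges.changes[0].metadata.ts is a Date, ready for mongo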
28
services/docstore/app/js/StreamToBuffer.js
Normal file
@@ -0,0 +1,28 @@
const { LoggerStream, WritableBuffer } = require('@overleaf/stream-utils')
const Settings = require('@overleaf/settings')
const logger = require('@overleaf/logger/logging-manager')
const { pipeline } = require('node:stream/promises')
const { callbackify } = require('node:util')

module.exports = {
  streamToBuffer: callbackify(streamToBuffer),
  promises: {
    streamToBuffer,
  },
}

async function streamToBuffer(projectId, docId, stream) {
  const loggerTransform = new LoggerStream(
    Settings.max_doc_length,
    (size, isFlush) => {
      logger.warn(
        { projectId, docId, size, finishedReading: isFlush },
        'potentially large doc pulled down from gcs'
      )
    }
  )

  const buffer = new WritableBuffer()
  await pipeline(stream, loggerTransform, buffer)
  return buffer.contents()
}
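A sketch of the intended call pattern, mirroring how DocArchiveManager.getDoc above uses this helper; projectId, docId and stream are placeholders:

const { promises } = require('./StreamToBuffer')

async function downloadArchivedJson(projectId, docId, stream) {
  // buffers the whole object in memory, logging a warning if it
  // grows past Settings.max_doc_length
  const buffer = await promises.streamToBuffer(projectId, docId, stream)
  return JSON.parse(buffer)
}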
18
services/docstore/app/js/mongodb.js
Normal file
@@ -0,0 +1,18 @@
const Metrics = require('@overleaf/metrics')
const Settings = require('@overleaf/settings')
const { MongoClient, ObjectId } = require('mongodb-legacy')

const mongoClient = new MongoClient(Settings.mongo.url, Settings.mongo.options)
const mongoDb = mongoClient.db()

const db = {
  docs: mongoDb.collection('docs'),
}

Metrics.mongodb.monitor(mongoClient)

module.exports = {
  db,
  mongoClient,
  ObjectId,
}
9
services/docstore/buildscript.txt
Normal file
@@ -0,0 +1,9 @@
docstore
--dependencies=mongo,gcs
--docker-repos=us-east1-docker.pkg.dev/overleaf-ops/ol-docker
--env-add=
--env-pass-through=
--esmock-loader=False
--node-version=20.18.2
--public-repo=True
--script-version=4.7.0
92
services/docstore/config/settings.defaults.js
Normal file
@@ -0,0 +1,92 @@
const http = require('node:http')
const https = require('node:https')

http.globalAgent.maxSockets = 300
http.globalAgent.keepAlive = false
https.globalAgent.keepAlive = false

const Settings = {
  internal: {
    docstore: {
      port: 3016,
      host: process.env.LISTEN_ADDRESS || '127.0.0.1',
    },
  },

  mongo: {
    options: {
      monitorCommands: true,
    },
  },

  docstore: {
    archiveOnSoftDelete: process.env.ARCHIVE_ON_SOFT_DELETE === 'true',
    keepSoftDeletedDocsArchived:
      process.env.KEEP_SOFT_DELETED_DOCS_ARCHIVED === 'true',

    backend: process.env.BACKEND,
    healthCheck: {
      project_id: process.env.HEALTH_CHECK_PROJECT_ID,
    },
    bucket: process.env.BUCKET_NAME || process.env.AWS_BUCKET || 'bucket',
    gcs: {
      unlockBeforeDelete: process.env.GCS_UNLOCK_BEFORE_DELETE === 'true',
      deletedBucketSuffix: process.env.GCS_DELETED_BUCKET_SUFFIX,
      deleteConcurrency: parseInt(process.env.GCS_DELETE_CONCURRENCY) || 50,
    },
  },

  max_deleted_docs: parseInt(process.env.MAX_DELETED_DOCS, 10) || 2000,

  max_doc_length: parseInt(process.env.MAX_DOC_LENGTH) || 2 * 1024 * 1024, // 2mb

  maxJsonRequestSize:
    parseInt(process.env.MAX_JSON_REQUEST_SIZE) || 6 * 1024 * 1024, // 6 MB

  unArchiveBatchSize: parseInt(process.env.UN_ARCHIVE_BATCH_SIZE, 10) || 50,
  parallelArchiveJobs: parseInt(process.env.PARALLEL_ARCHIVE_JOBS, 10) || 5,
  archivingLockDurationMs:
    parseInt(process.env.ARCHIVING_LOCK_DURATION_MS, 10) || 60000,
}

if (process.env.MONGO_CONNECTION_STRING) {
  Settings.mongo.url = process.env.MONGO_CONNECTION_STRING
} else if (process.env.MONGO_HOST) {
  Settings.mongo.url = `mongodb://${process.env.MONGO_HOST}/sharelatex`
} else {
  Settings.mongo.url = 'mongodb://127.0.0.1/sharelatex'
}

if (
  process.env.AWS_ACCESS_KEY_ID &&
  process.env.AWS_SECRET_ACCESS_KEY &&
  process.env.AWS_BUCKET
) {
  Settings.docstore.s3 = {
    key: process.env.AWS_ACCESS_KEY_ID,
    secret: process.env.AWS_SECRET_ACCESS_KEY,
    bucket: process.env.AWS_BUCKET,
    endpoint: process.env.AWS_S3_ENDPOINT,
    pathStyle: process.env.AWS_S3_PATH_STYLE,
    partSize: parseInt(process.env.AWS_S3_PARTSIZE) || 100 * 1024 * 1024,
  }
}

if (process.env.GCS_API_ENDPOINT) {
  Settings.docstore.gcs.endpoint = {
    apiEndpoint: process.env.GCS_API_ENDPOINT,
    projectId: process.env.GCS_PROJECT_ID,
  }
}

if (process.env.FALLBACK_BACKEND) {
  Settings.docstore.fallback = {
    backend: process.env.FALLBACK_BACKEND,
    // mapping of bucket names on the fallback, to bucket names on the primary.
    // e.g. { myS3UserFilesBucketName: 'myGoogleUserFilesBucketName' }
    buckets: JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}'),
    copyOnMiss: process.env.COPY_ON_MISS === 'true',
  }
}

module.exports = Settings
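As the in-code comment illustrates, the fallback bucket mapping travels in a single JSON-valued environment variable; a hypothetical invocation (env values made up for illustration):

// FALLBACK_BACKEND=gcs
// FALLBACK_BUCKET_MAPPING='{"myS3UserFilesBucketName":"myGoogleUserFilesBucketName"}'
const buckets = JSON.parse(process.env.FALLBACK_BUCKET_MAPPING || '{}')
// buckets.myS3UserFilesBucketName === 'myGoogleUserFilesBucketName'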
64
services/docstore/docker-compose.ci.yml
Normal file
@@ -0,0 +1,64 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/overleaf/internal/

version: "2.3"

services:
  test_unit:
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    user: node
    command: npm run test:unit:_run
    environment:
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"


  test_acceptance:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      GCS_API_ENDPOINT: http://gcs:9090
      GCS_PROJECT_ID: fake
      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
      MOCHA_GREP: ${MOCHA_GREP}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    depends_on:
      mongo:
        condition: service_started
      gcs:
        condition: service_healthy
    user: node
    command: npm run test:acceptance


  tar:
    build: .
    image: ci/$PROJECT_NAME:$BRANCH_NAME-$BUILD_NUMBER
    volumes:
      - ./:/tmp/build/
    command: tar -czf /tmp/build/build.tar.gz --exclude=build.tar.gz --exclude-vcs .
    user: root
  mongo:
    image: mongo:6.0.13
    command: --replSet overleaf
    volumes:
      - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
    environment:
      MONGO_INITDB_DATABASE: sharelatex
    extra_hosts:
      # Required when using the automatic database setup for initializing the
      # replica set. This override is not needed when running the setup after
      # starting up mongo.
      - mongo:127.0.0.1
  gcs:
    image: fsouza/fake-gcs-server:1.45.2
    command: ["--port=9090", "--scheme=http"]
    healthcheck:
      test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b
      interval: 1s
      retries: 20
68
services/docstore/docker-compose.yml
Normal file
@@ -0,0 +1,68 @@
# This file was auto-generated, do not edit it directly.
# Instead run bin/update_build_scripts from
# https://github.com/overleaf/internal/

version: "2.3"

services:
  test_unit:
    image: node:20.18.2
    volumes:
      - .:/overleaf/services/docstore
      - ../../node_modules:/overleaf/node_modules
      - ../../libraries:/overleaf/libraries
    working_dir: /overleaf/services/docstore
    environment:
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ${LOG_LEVEL:-}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    command: npm run --silent test:unit
    user: node

  test_acceptance:
    image: node:20.18.2
    volumes:
      - .:/overleaf/services/docstore
      - ../../node_modules:/overleaf/node_modules
      - ../../libraries:/overleaf/libraries
    working_dir: /overleaf/services/docstore
    environment:
      ELASTIC_SEARCH_DSN: es:9200
      MONGO_HOST: mongo
      POSTGRES_HOST: postgres
      GCS_API_ENDPOINT: http://gcs:9090
      GCS_PROJECT_ID: fake
      STORAGE_EMULATOR_HOST: http://gcs:9090/storage/v1
      MOCHA_GREP: ${MOCHA_GREP}
      LOG_LEVEL: ${LOG_LEVEL:-}
      NODE_ENV: test
      NODE_OPTIONS: "--unhandled-rejections=strict"
    user: node
    depends_on:
      mongo:
        condition: service_started
      gcs:
        condition: service_healthy
    command: npm run --silent test:acceptance

  mongo:
    image: mongo:6.0.13
    command: --replSet overleaf
    volumes:
      - ../../bin/shared/mongodb-init-replica-set.js:/docker-entrypoint-initdb.d/mongodb-init-replica-set.js
    environment:
      MONGO_INITDB_DATABASE: sharelatex
    extra_hosts:
      # Required when using the automatic database setup for initializing the
      # replica set. This override is not needed when running the setup after
      # starting up mongo.
      - mongo:127.0.0.1

  gcs:
    image: fsouza/fake-gcs-server:1.45.2
    command: ["--port=9090", "--scheme=http"]
    healthcheck:
      test: wget --quiet --output-document=/dev/null http://localhost:9090/storage/v1/b
      interval: 1s
      retries: 20
47
services/docstore/package.json
Normal file
@@ -0,0 +1,47 @@
{
  "name": "@overleaf/docstore",
  "description": "A CRUD API for handling text documents in projects",
  "private": true,
  "main": "app.js",
  "scripts": {
    "start": "node app.js",
    "test:acceptance:_run": "mocha --recursive --reporter spec --timeout 15000 --exit $@ test/acceptance/js",
    "test:acceptance": "npm run test:acceptance:_run -- --grep=$MOCHA_GREP",
    "test:unit:_run": "mocha --recursive --reporter spec $@ test/unit/js",
    "test:unit": "npm run test:unit:_run -- --grep=$MOCHA_GREP",
    "nodemon": "node --watch app.js",
    "lint": "eslint --max-warnings 0 --format unix .",
    "format": "prettier --list-different $PWD/'**/*.*js'",
    "format:fix": "prettier --write $PWD/'**/*.*js'",
    "lint:fix": "eslint --fix .",
    "types:check": "tsc --noEmit"
  },
  "dependencies": {
    "@overleaf/logger": "*",
    "@overleaf/metrics": "*",
    "@overleaf/o-error": "*",
    "@overleaf/object-persistor": "*",
    "@overleaf/promise-utils": "*",
    "@overleaf/settings": "*",
    "@overleaf/stream-utils": "^0.1.0",
    "async": "^3.2.5",
    "body-parser": "^1.20.3",
    "bunyan": "^1.8.15",
    "celebrate": "^15.0.3",
    "express": "^4.21.2",
    "lodash": "^4.17.21",
    "mongodb-legacy": "6.1.3",
    "p-map": "^4.0.0",
    "request": "^2.88.2"
  },
  "devDependencies": {
    "@google-cloud/storage": "^6.10.1",
    "chai": "^4.3.6",
    "chai-as-promised": "^7.1.1",
    "mocha": "^11.1.0",
    "sandboxed-module": "~2.0.4",
    "sinon": "~9.0.2",
    "sinon-chai": "^3.7.0",
    "typescript": "^5.0.4"
  }
}
5
services/docstore/test/acceptance/deps/Dockerfile
Normal file
@@ -0,0 +1,5 @@
FROM fsouza/fake-gcs-server:latest
RUN apk add --update --no-cache curl
COPY healthcheck.sh /healthcheck.sh
HEALTHCHECK --interval=1s --timeout=1s --retries=30 CMD /healthcheck.sh http://localhost:9090
CMD ["--port=9090", "--scheme=http"]
9
services/docstore/test/acceptance/deps/healthcheck.sh
Normal file
@@ -0,0 +1,9 @@
#!/bin/sh

# health check to allow 404 status code as valid
STATUSCODE=$(curl --silent --output /dev/null --write-out "%{http_code}" "$1")
# will be 000 on non-http error (e.g. connection failure)
if test "$STATUSCODE" -ge 500 || test "$STATUSCODE" -lt 200; then
  exit 1
fi
exit 0
1239
services/docstore/test/acceptance/js/ArchiveDocsTests.js
Normal file
File diff suppressed because it is too large
511
services/docstore/test/acceptance/js/DeletingDocsTests.js
Normal file
@@ -0,0 +1,511 @@
|
||||
const { db, ObjectId } = require('../../../app/js/mongodb')
|
||||
const { expect } = require('chai')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { Storage } = require('@google-cloud/storage')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
function deleteTestSuite(deleteDoc) {
|
||||
before(async function () {
|
||||
// Create buckets needed by the archiving part of these tests
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.createBucket(Settings.docstore.bucket)
|
||||
await storage.createBucket(`${Settings.docstore.bucket}-deleted`)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
// Tear down the buckets created above
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.bucket(Settings.docstore.bucket).deleteFiles()
|
||||
await storage.bucket(Settings.docstore.bucket).delete()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete()
|
||||
})
|
||||
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.lines = ['original', 'lines']
|
||||
this.version = 42
|
||||
this.ranges = []
|
||||
DocstoreApp.ensureRunning(() => {
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should show as not deleted on /deleted', function (done) {
|
||||
DocstoreClient.isDocDeleted(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(200)
|
||||
expect(body).to.have.property('deleted').to.equal(false)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('when the doc exists', function () {
|
||||
beforeEach(function (done) {
|
||||
deleteDoc(this.project_id, this.doc_id, (error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
afterEach(function (done) {
|
||||
db.docs.deleteOne({ _id: this.doc_id }, done)
|
||||
})
|
||||
|
||||
it('should mark the doc as deleted on /deleted', function (done) {
|
||||
DocstoreClient.isDocDeleted(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(200)
|
||||
expect(body).to.have.property('deleted').to.equal(true)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should insert a deleted doc into the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
|
||||
if (error) return done(error)
|
||||
docs[0]._id.should.deep.equal(this.doc_id)
|
||||
docs[0].lines.should.deep.equal(this.lines)
|
||||
docs[0].deleted.should.equal(true)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should not export the doc to s3', function (done) {
|
||||
setTimeout(() => {
|
||||
DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => {
|
||||
expect(error).to.be.instanceOf(Errors.NotFoundError)
|
||||
done()
|
||||
})
|
||||
}, 1000)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when archiveOnSoftDelete is enabled', function () {
|
||||
let archiveOnSoftDelete
|
||||
beforeEach('overwrite settings', function () {
|
||||
archiveOnSoftDelete = Settings.docstore.archiveOnSoftDelete
|
||||
Settings.docstore.archiveOnSoftDelete = true
|
||||
})
|
||||
afterEach('restore settings', function () {
|
||||
Settings.docstore.archiveOnSoftDelete = archiveOnSoftDelete
|
||||
})
|
||||
|
||||
beforeEach('delete Doc', function (done) {
|
||||
deleteDoc(this.project_id, this.doc_id, (error, res) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
beforeEach(function waitForBackgroundFlush(done) {
|
||||
setTimeout(done, 500)
|
||||
})
|
||||
|
||||
afterEach(function cleanupDoc(done) {
|
||||
db.docs.deleteOne({ _id: this.doc_id }, done)
|
||||
})
|
||||
|
||||
it('should set the deleted flag in the doc', function (done) {
|
||||
db.docs.findOne({ _id: this.doc_id }, (error, doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(doc.deleted).to.equal(true)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should set inS3 and unset lines and ranges in the doc', function (done) {
|
||||
db.docs.findOne({ _id: this.doc_id }, (error, doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(doc.lines).to.not.exist
|
||||
expect(doc.ranges).to.not.exist
|
||||
expect(doc.inS3).to.equal(true)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should set the doc in s3 correctly', function (done) {
|
||||
DocstoreClient.getS3Doc(this.project_id, this.doc_id, (error, s3doc) => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
expect(s3doc.lines).to.deep.equal(this.lines)
|
||||
expect(s3doc.ranges).to.deep.equal(this.ranges)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc exists in another project', function () {
|
||||
const otherProjectId = new ObjectId()
|
||||
|
||||
it('should show as not existing on /deleted', function (done) {
|
||||
DocstoreClient.isDocDeleted(otherProjectId, this.doc_id, (error, res) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should return a 404 when trying to delete', function (done) {
|
||||
deleteDoc(otherProjectId, this.doc_id, (error, res) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist', function () {
|
||||
it('should show as not existing on /deleted', function (done) {
|
||||
const missingDocId = new ObjectId()
|
||||
DocstoreClient.isDocDeleted(
|
||||
this.project_id,
|
||||
missingDocId,
|
||||
(error, res) => {
|
||||
if (error) return done(error)
|
||||
expect(res.statusCode).to.equal(404)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return a 404', function (done) {
|
||||
const missingDocId = new ObjectId()
|
||||
deleteDoc(this.project_id, missingDocId, (error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(404)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
describe('Delete via PATCH', function () {
|
||||
deleteTestSuite(DocstoreClient.deleteDoc)
|
||||
|
||||
describe('when providing a custom doc name in the delete request', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.deleteDocWithName(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
'wombat.tex',
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should insert the doc name into the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
|
||||
if (error) return done(error)
|
||||
expect(docs[0].name).to.equal('wombat.tex')
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when providing a custom deletedAt date in the delete request', function () {
|
||||
beforeEach('record date and delay', function (done) {
|
||||
this.deletedAt = new Date()
|
||||
setTimeout(done, 5)
|
||||
})
|
||||
|
||||
beforeEach('perform deletion with past date', function (done) {
|
||||
DocstoreClient.deleteDocWithDate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.deletedAt,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should insert the date into the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((error, docs) => {
|
||||
if (error) return done(error)
|
||||
expect(docs[0].deletedAt.toISOString()).to.equal(
|
||||
this.deletedAt.toISOString()
|
||||
)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when providing no doc name in the delete request', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.deleteDocWithName(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
'',
|
||||
(error, res) => {
|
||||
this.res = res
|
||||
done(error)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should reject the request', function () {
|
||||
expect(this.res.statusCode).to.equal(400)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when providing no date in the delete request', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.deleteDocWithDate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
'',
|
||||
(error, res) => {
|
||||
this.res = res
|
||||
done(error)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should reject the request', function () {
|
||||
expect(this.res.statusCode).to.equal(400)
|
||||
})
|
||||
})
|
||||
|
||||
describe('before deleting anything', function () {
|
||||
it('should show nothing in deleted docs response', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
expect(deletedDocs).to.deep.equal([])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc gets a name on delete', function () {
|
||||
beforeEach(function (done) {
|
||||
this.deletedAt = new Date()
|
||||
DocstoreClient.deleteDocWithDate(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.deletedAt,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should show the doc in deleted docs response', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
expect(deletedDocs).to.deep.equal([
|
||||
{
|
||||
_id: this.doc_id.toString(),
|
||||
name: 'main.tex',
|
||||
deletedAt: this.deletedAt.toISOString(),
|
||||
},
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('after deleting multiple docs', function () {
|
||||
beforeEach('create doc2', function (done) {
|
||||
this.doc_id2 = new ObjectId()
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id2,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
done
|
||||
)
|
||||
})
|
||||
beforeEach('delete doc2', function (done) {
|
||||
this.deletedAt2 = new Date()
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
this.project_id,
|
||||
this.doc_id2,
|
||||
this.deletedAt2,
|
||||
'two.tex',
|
||||
done
|
||||
)
|
||||
})
|
||||
beforeEach('create doc3', function (done) {
|
||||
this.doc_id3 = new ObjectId()
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id3,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
done
|
||||
)
|
||||
})
|
||||
beforeEach('delete doc3', function (done) {
|
||||
this.deletedAt3 = new Date()
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
this.project_id,
|
||||
this.doc_id3,
|
||||
this.deletedAt3,
|
||||
'three.tex',
|
||||
done
|
||||
)
|
||||
})
|
||||
it('should show all the docs as deleted', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
|
||||
expect(deletedDocs).to.deep.equal([
|
||||
{
|
||||
_id: this.doc_id3.toString(),
|
||||
name: 'three.tex',
|
||||
deletedAt: this.deletedAt3.toISOString(),
|
||||
},
|
||||
{
|
||||
_id: this.doc_id2.toString(),
|
||||
name: 'two.tex',
|
||||
deletedAt: this.deletedAt2.toISOString(),
|
||||
},
|
||||
{
|
||||
_id: this.doc_id.toString(),
|
||||
name: 'main.tex',
|
||||
deletedAt: this.deletedAt.toISOString(),
|
||||
},
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
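// max_deleted_docs caps the /doc-deleted listing; entries come back newest first, so the oldest deletion is dropped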
describe('with one more than max_deleted_docs permits', function () {
|
||||
let maxDeletedDocsBefore
|
||||
beforeEach(function () {
|
||||
maxDeletedDocsBefore = Settings.max_deleted_docs
|
||||
Settings.max_deleted_docs = 2
|
||||
})
|
||||
afterEach(function () {
|
||||
Settings.max_deleted_docs = maxDeletedDocsBefore
|
||||
})
|
||||
|
||||
it('should omit the first deleted doc', function (done) {
|
||||
DocstoreClient.getAllDeletedDocs(
|
||||
this.project_id,
|
||||
(error, deletedDocs) => {
|
||||
if (error) return done(error)
|
||||
|
||||
expect(deletedDocs).to.deep.equal([
|
||||
{
|
||||
_id: this.doc_id3.toString(),
|
||||
name: 'three.tex',
|
||||
deletedAt: this.deletedAt3.toISOString(),
|
||||
},
|
||||
{
|
||||
_id: this.doc_id2.toString(),
|
||||
name: 'two.tex',
|
||||
deletedAt: this.deletedAt2.toISOString(),
|
||||
},
|
||||
// dropped main.tex
|
||||
])
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
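// Destroying a project removes its docs from mongo and, for archived docs, from the object store as well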
describe("Destroying a project's documents", function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.lines = ['original', 'lines']
|
||||
this.version = 42
|
||||
this.ranges = []
|
||||
DocstoreApp.ensureRunning(() => {
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error) {
|
||||
throw error
|
||||
}
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc exists', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.destroyAllDoc(this.project_id, done)
|
||||
})
|
||||
|
||||
it('should remove the doc from the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((err, docs) => {
|
||||
expect(err).not.to.exist
|
||||
expect(docs).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc is archived', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.archiveAllDoc(this.project_id, err => {
|
||||
if (err) {
|
||||
return done(err)
|
||||
}
|
||||
DocstoreClient.destroyAllDoc(this.project_id, done)
|
||||
})
|
||||
})
|
||||
|
||||
it('should remove the doc from the docs collection', function (done) {
|
||||
db.docs.find({ _id: this.doc_id }).toArray((err, docs) => {
|
||||
expect(err).not.to.exist
|
||||
expect(docs).to.deep.equal([])
|
||||
done()
|
||||
})
|
||||
})
|
||||
|
||||
it('should remove the doc contents from s3', function (done) {
|
||||
DocstoreClient.getS3Doc(this.project_id, this.doc_id, error => {
|
||||
expect(error).to.be.instanceOf(Errors.NotFoundError)
|
||||
done()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
112
services/docstore/test/acceptance/js/GettingAllDocsTests.js
Normal file
@@ -0,0 +1,112 @@
|
||||
/* eslint-disable
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS101: Remove unnecessary use of Array.from
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const async = require('async')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
describe('Getting all docs', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.docs = [
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['one', 'two', 'three'],
|
||||
ranges: { mock: 'one' },
|
||||
rev: 2,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['aaa', 'bbb', 'ccc'],
|
||||
ranges: { mock: 'two' },
|
||||
rev: 4,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['111', '222', '333'],
|
||||
ranges: { mock: 'three' },
|
||||
rev: 6,
|
||||
},
|
||||
]
|
||||
this.deleted_doc = {
|
||||
_id: new ObjectId(),
|
||||
lines: ['deleted'],
|
||||
ranges: { mock: 'four' },
|
||||
rev: 8,
|
||||
}
|
||||
const version = 42
|
||||
const jobs = this.docs.map(doc => callback =>
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
doc._id,
|
||||
doc.lines,
|
||||
version,
|
||||
doc.ranges,
|
||||
callback
|
||||
)
|
||||
)
|
||||
jobs.push(cb => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc._id,
|
||||
this.deleted_doc.lines,
|
||||
version,
|
||||
this.deleted_doc.ranges,
|
||||
err => {
|
||||
if (err) return done(err)
|
||||
return DocstoreClient.deleteDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc._id,
|
||||
cb
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
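// Ensure the app is listening before any of the HTTP jobs run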
jobs.unshift(cb => DocstoreApp.ensureRunning(cb))
|
||||
return async.series(jobs, done)
|
||||
})
|
||||
|
||||
it('getAllDocs should return all the (non-deleted) docs', function (done) {
|
||||
return DocstoreClient.getAllDocs(this.project_id, (error, res, docs) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
docs.length.should.equal(this.docs.length)
|
||||
for (let i = 0; i < docs.length; i++) {
|
||||
const doc = docs[i]
|
||||
doc.lines.should.deep.equal(this.docs[i].lines)
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
|
||||
return it('getAllRanges should return all the (non-deleted) doc ranges', function (done) {
|
||||
return DocstoreClient.getAllRanges(this.project_id, (error, res, docs) => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
docs.length.should.equal(this.docs.length)
|
||||
for (let i = 0; i < docs.length; i++) {
|
||||
const doc = docs[i]
|
||||
doc.ranges.should.deep.equal(this.docs[i].ranges)
|
||||
}
|
||||
return done()
|
||||
})
|
||||
})
|
||||
})
|
@@ -0,0 +1,139 @@
|
||||
const Settings = require('@overleaf/settings')
|
||||
const { ObjectId } = require('../../../app/js/mongodb')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
const { Storage } = require('@google-cloud/storage')
|
||||
|
||||
describe('Getting A Doc from Archive', function () {
|
||||
before(function (done) {
|
||||
return DocstoreApp.ensureRunning(done)
|
||||
})
|
||||
|
||||
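// Create the buckets these tests read and write; the -deleted bucket is assumed to receive destroyed docs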
before(async function () {
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.createBucket(Settings.docstore.bucket)
|
||||
await storage.createBucket(`${Settings.docstore.bucket}-deleted`)
|
||||
})
|
||||
|
||||
after(async function () {
|
||||
// Tear down the buckets created above
|
||||
const storage = new Storage(Settings.docstore.gcs.endpoint)
|
||||
await storage.bucket(Settings.docstore.bucket).deleteFiles()
|
||||
await storage.bucket(Settings.docstore.bucket).delete()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).deleteFiles()
|
||||
await storage.bucket(`${Settings.docstore.bucket}-deleted`).delete()
|
||||
})
|
||||
|
||||
describe('for an archived doc', function () {
|
||||
before(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.timeout(1000 * 30)
|
||||
this.doc = {
|
||||
_id: new ObjectId(),
|
||||
lines: ['foo', 'bar'],
|
||||
ranges: {},
|
||||
version: 2,
|
||||
}
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
this.doc.lines,
|
||||
this.doc.version,
|
||||
this.doc.ranges,
|
||||
error => {
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
DocstoreClient.archiveDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
(error, res) => {
|
||||
this.res = res
|
||||
if (error) {
|
||||
return done(error)
|
||||
}
|
||||
done()
|
||||
}
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should successfully archive the doc', function (done) {
|
||||
this.res.statusCode.should.equal(204)
|
||||
done()
|
||||
})
|
||||
|
||||
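// peek serves the content from the archive without unarchiving it, and reports the source via x-doc-status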
it('should return the doc lines and version from persistent storage', function (done) {
|
||||
return DocstoreClient.peekDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(200)
|
||||
res.headers['x-doc-status'].should.equal('archived')
|
||||
doc.lines.should.deep.equal(this.doc.lines)
|
||||
doc.version.should.equal(this.doc.version)
|
||||
doc.ranges.should.deep.equal(this.doc.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the doc lines and version from persistent storage on subsequent requests', function (done) {
|
||||
return DocstoreClient.peekDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(200)
|
||||
res.headers['x-doc-status'].should.equal('archived')
|
||||
doc.lines.should.deep.equal(this.doc.lines)
|
||||
doc.version.should.equal(this.doc.version)
|
||||
doc.ranges.should.deep.equal(this.doc.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
describe('for a non-archived doc', function () {
|
||||
before(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.timeout(1000 * 30)
|
||||
this.doc = {
|
||||
_id: new ObjectId(),
|
||||
lines: ['foo', 'bar'],
|
||||
ranges: {},
|
||||
version: 2,
|
||||
}
|
||||
DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
this.doc.lines,
|
||||
this.doc.version,
|
||||
this.doc.ranges,
|
||||
done
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the doc lines and version from mongo', function (done) {
|
||||
return DocstoreClient.peekDoc(
|
||||
this.project_id,
|
||||
this.doc._id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(200)
|
||||
res.headers['x-doc-status'].should.equal('active')
|
||||
doc.lines.should.deep.equal(this.doc.lines)
|
||||
doc.version.should.equal(this.doc.version)
|
||||
doc.ranges.should.deep.equal(this.doc.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
137
services/docstore/test/acceptance/js/GettingDocsTests.js
Normal file
@@ -0,0 +1,137 @@
|
||||
/* eslint-disable
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
describe('Getting a doc', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.lines = ['original', 'lines']
|
||||
this.version = 42
|
||||
this.ranges = {
|
||||
changes: [
|
||||
{
|
||||
id: new ObjectId().toString(),
|
||||
op: { i: 'foo', p: 3 },
|
||||
meta: {
|
||||
user_id: new ObjectId().toString(),
|
||||
ts: new Date().toString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
return DocstoreApp.ensureRunning(() => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc exists', function () {
|
||||
return it('should get the doc lines and version', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.lines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.ranges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist', function () {
|
||||
return it('should return a 404', function (done) {
|
||||
const missingDocId = new ObjectId()
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
missingDocId,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(404)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
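// Deleted docs stay readable, but only when include_deleted=true is sent in the query string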
return describe('when the doc is a deleted doc', function () {
|
||||
beforeEach(function (done) {
|
||||
this.deleted_doc_id = new ObjectId()
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
this.lines,
|
||||
this.version,
|
||||
this.ranges,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return DocstoreClient.deleteDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
done
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the doc', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
{ include_deleted: true },
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.lines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.ranges)
|
||||
doc.deleted.should.equal(true)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should return a 404 when the query string is not set', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.deleted_doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
res.statusCode.should.equal(404)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
557
services/docstore/test/acceptance/js/UpdatingDocsTests.js
Normal file
@@ -0,0 +1,557 @@
|
||||
/* eslint-disable
|
||||
no-unused-vars,
|
||||
*/
|
||||
// TODO: This file was created by bulk-decaffeinate.
|
||||
// Fix any style issues and re-enable lint.
|
||||
/*
|
||||
* decaffeinate suggestions:
|
||||
* DS102: Remove unnecessary code created because of implicit returns
|
||||
* DS207: Consider shorter variations of null checks
|
||||
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
|
||||
*/
|
||||
const sinon = require('sinon')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const DocstoreApp = require('./helpers/DocstoreApp')
|
||||
|
||||
const DocstoreClient = require('./helpers/DocstoreClient')
|
||||
|
||||
describe('Applying updates to a doc', function () {
|
||||
beforeEach(function (done) {
|
||||
this.project_id = new ObjectId()
|
||||
this.doc_id = new ObjectId()
|
||||
this.originalLines = ['original', 'lines']
|
||||
this.newLines = ['new', 'lines']
|
||||
this.originalRanges = {
|
||||
changes: [
|
||||
{
|
||||
id: new ObjectId().toString(),
|
||||
op: { i: 'foo', p: 3 },
|
||||
meta: {
|
||||
user_id: new ObjectId().toString(),
|
||||
ts: new Date().toString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
this.newRanges = {
|
||||
changes: [
|
||||
{
|
||||
id: new ObjectId().toString(),
|
||||
op: { i: 'bar', p: 6 },
|
||||
meta: {
|
||||
user_id: new ObjectId().toString(),
|
||||
ts: new Date().toString(),
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
this.version = 42
|
||||
return DocstoreApp.ensureRunning(() => {
|
||||
return DocstoreClient.createDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when nothing has been updated', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = false', function () {
|
||||
return this.body.modified.should.equal(false)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the lines have changed', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.newLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the rev', function () {
|
||||
return this.body.rev.should.equal(2)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.newLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the version has changed', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version + 1,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the rev', function () {
|
||||
return this.body.rev.should.equal(1)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version + 1)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
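// A version lower than the stored one is a conflicting write and must not overwrite anything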
describe('when the version was decremented', function () {
|
||||
beforeEach(function (done) {
|
||||
DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.newLines,
|
||||
this.version - 1,
|
||||
this.newRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 409', function () {
|
||||
this.res.statusCode.should.equal(409)
|
||||
})
|
||||
|
||||
it('should not update the doc in the API', function (done) {
|
||||
DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the ranges have changed', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
this.version,
|
||||
this.newRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
it('should return the rev', function () {
|
||||
return this.body.rev.should.equal(2)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
doc.ranges.should.deep.equal(this.newRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the doc does not exist', function () {
|
||||
beforeEach(function (done) {
|
||||
this.missing_doc_id = new ObjectId()
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.missing_doc_id,
|
||||
this.originalLines,
|
||||
0,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should create the doc', function () {
|
||||
return this.body.rev.should.equal(1)
|
||||
})
|
||||
|
||||
return it('should be retrievable', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.missing_doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(0)
|
||||
doc.ranges.should.deep.equal(this.originalRanges)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when malformed doc lines are provided', function () {
|
||||
describe('when the lines are not an array', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{ foo: 'bar' },
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 400', function () {
|
||||
return this.res.statusCode.should.equal(400)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the lines are not present', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
null,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 400', function () {
|
||||
return this.res.statusCode.should.equal(400)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('when no version is provided', function () {
|
||||
beforeEach(function (done) {
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.originalLines,
|
||||
null,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 400', function () {
|
||||
return this.res.statusCode.should.equal(400)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
doc.version.should.equal(this.version)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when the content is large', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.largeLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when there is a large json payload', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
|
||||
this.originalRanges.padding = Array.apply(null, Array(2049)).map(
|
||||
() => line
|
||||
) // 2mb + 1kb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return modified = true', function () {
|
||||
return this.body.modified.should.equal(true)
|
||||
})
|
||||
|
||||
return it('should update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.largeLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
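// Just over 2mb of lines exceeds the maximum document size, so the update should be refused outright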
describe('when the document body is too large', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(2049)).map(() => line) // 2mb + 1kb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should return 413', function () {
|
||||
return this.res.statusCode.should.equal(413)
|
||||
})
|
||||
|
||||
it('should report body too large', function () {
|
||||
return this.res.body.should.equal('document body too large')
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
return describe('when the json payload is too large', function () {
|
||||
beforeEach(function (done) {
|
||||
const line = new Array(1025).join('x') // 1kb
|
||||
this.largeLines = Array.apply(null, Array(1024)).map(() => line) // 1mb
|
||||
this.originalRanges.padding = Array.apply(null, Array(6144)).map(
|
||||
() => line
|
||||
) // 6mb
|
||||
return DocstoreClient.updateDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
this.largeLines,
|
||||
this.version,
|
||||
this.originalRanges,
|
||||
(error, res, body) => {
|
||||
if (error) return done(error)
|
||||
this.res = res
|
||||
this.body = body
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
return it('should not update the doc in the API', function (done) {
|
||||
return DocstoreClient.getDoc(
|
||||
this.project_id,
|
||||
this.doc_id,
|
||||
{},
|
||||
(error, res, doc) => {
|
||||
if (error) return done(error)
|
||||
doc.lines.should.deep.equal(this.originalLines)
|
||||
return done()
|
||||
}
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
26
services/docstore/test/acceptance/js/helpers/DocstoreApp.js
Normal file
@@ -0,0 +1,26 @@
|
||||
const app = require('../../../../app')
|
||||
const settings = require('@overleaf/settings')
|
||||
|
||||
module.exports = {
|
||||
running: false,
|
||||
initing: false,
|
||||
callbacks: [],
|
||||
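// Boot the app at most once; callers arriving mid-startup are queued and invoked once it is listening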
ensureRunning(callback) {
|
||||
if (this.running) {
|
||||
return callback()
|
||||
} else if (this.initing) {
|
||||
return this.callbacks.push(callback)
|
||||
}
|
||||
this.initing = true
|
||||
this.callbacks.push(callback)
|
||||
app.listen(settings.internal.docstore.port, '127.0.0.1', error => {
|
||||
if (error != null) {
|
||||
throw error
|
||||
}
|
||||
this.running = true
|
||||
for (const callback of Array.from(this.callbacks)) {
|
||||
callback()
|
||||
}
|
||||
})
|
||||
},
|
||||
}
|
195
services/docstore/test/acceptance/js/helpers/DocstoreClient.js
Normal file
@@ -0,0 +1,195 @@
|
||||
let DocstoreClient
|
||||
const request = require('request').defaults({ jar: false })
|
||||
const settings = require('@overleaf/settings')
|
||||
const Persistor = require('../../../../app/js/PersistorManager')
|
||||
|
||||
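// Drain a readable stream into a UTF-8 string so stored JSON payloads can be parsed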
async function streamToString(stream) {
|
||||
const chunks = []
|
||||
return await new Promise((resolve, reject) => {
|
||||
stream.on('data', chunk => chunks.push(chunk))
|
||||
stream.on('error', reject)
|
||||
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')))
|
||||
})
|
||||
}
|
||||
|
||||
async function getStringFromPersistor(persistor, bucket, key) {
|
||||
const stream = await persistor.getObjectStream(bucket, key, {})
|
||||
stream.resume()
|
||||
return await streamToString(stream)
|
||||
}
|
||||
|
||||
module.exports = DocstoreClient = {
|
||||
createDoc(projectId, docId, lines, version, ranges, callback) {
|
||||
return DocstoreClient.updateDoc(
|
||||
projectId,
|
||||
docId,
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
getDoc(projectId, docId, qs, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
|
||||
json: true,
|
||||
qs,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
peekDoc(projectId, docId, qs, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/peek`,
|
||||
json: true,
|
||||
qs,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
isDocDeleted(projectId, docId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/deleted`,
|
||||
json: true,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
getAllDocs(projectId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc`,
|
||||
json: true,
|
||||
},
|
||||
(req, res, body) => {
|
||||
callback(req, res, body)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getAllDeletedDocs(projectId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc-deleted`,
|
||||
json: true,
|
||||
},
|
||||
(error, res, body) => {
|
||||
if (error) return callback(error)
|
||||
if (res.statusCode !== 200) {
|
||||
return callback(new Error('unexpected statusCode'))
|
||||
}
|
||||
callback(null, body)
|
||||
}
|
||||
)
|
||||
},
|
||||
|
||||
getAllRanges(projectId, callback) {
|
||||
request.get(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/ranges`,
|
||||
json: true,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
updateDoc(projectId, docId, lines, version, ranges, callback) {
|
||||
return request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
|
||||
json: {
|
||||
lines,
|
||||
version,
|
||||
ranges,
|
||||
},
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDoc(projectId, docId, callback) {
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
projectId,
|
||||
docId,
|
||||
new Date(),
|
||||
'main.tex',
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDocWithDate(projectId, docId, date, callback) {
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
projectId,
|
||||
docId,
|
||||
date,
|
||||
'main.tex',
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDocWithName(projectId, docId, name, callback) {
|
||||
DocstoreClient.deleteDocWithDateAndName(
|
||||
projectId,
|
||||
docId,
|
||||
new Date(),
|
||||
name,
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
deleteDocWithDateAndName(projectId, docId, deletedAt, name, callback) {
|
||||
request.patch(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}`,
|
||||
json: { name, deleted: true, deletedAt },
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
archiveAllDoc(projectId, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/archive`,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
archiveDoc(projectId, docId, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/doc/${docId}/archive`,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
destroyAllDoc(projectId, callback) {
|
||||
request.post(
|
||||
{
|
||||
url: `http://127.0.0.1:${settings.internal.docstore.port}/project/${projectId}/destroy`,
|
||||
},
|
||||
callback
|
||||
)
|
||||
},
|
||||
|
||||
getS3Doc(projectId, docId, callback) {
|
||||
getStringFromPersistor(
|
||||
Persistor,
|
||||
settings.docstore.bucket,
|
||||
`${projectId}/${docId}`
|
||||
)
|
||||
.then(data => {
|
||||
callback(null, JSON.parse(data))
|
||||
})
|
||||
.catch(callback)
|
||||
},
|
||||
}
|
55
services/docstore/test/setup.js
Normal file
@@ -0,0 +1,55 @@
|
||||
const chai = require('chai')
|
||||
const sinon = require('sinon')
|
||||
const sinonChai = require('sinon-chai')
|
||||
const chaiAsPromised = require('chai-as-promised')
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const timersPromises = require('node:timers/promises')
|
||||
|
||||
// ensure every ObjectId has the id string as a property for correct comparisons
|
||||
require('mongodb-legacy').ObjectId.cacheHexString = true
|
||||
|
||||
process.env.BACKEND = 'gcs'
|
||||
|
||||
// Chai configuration
|
||||
chai.should()
|
||||
chai.use(sinonChai)
|
||||
chai.use(chaiAsPromised)
|
||||
|
||||
// Global stubs
|
||||
const sandbox = sinon.createSandbox()
|
||||
const stubs = {
|
||||
logger: {
|
||||
debug: sandbox.stub(),
|
||||
log: sandbox.stub(),
|
||||
info: sandbox.stub(),
|
||||
warn: sandbox.stub(),
|
||||
err: sandbox.stub(),
|
||||
error: sandbox.stub(),
|
||||
fatal: sandbox.stub(),
|
||||
},
|
||||
}
|
||||
|
||||
// SandboxedModule configuration
|
||||
SandboxedModule.configure({
|
||||
requires: {
|
||||
'@overleaf/logger': stubs.logger,
|
||||
'timers/promises': timersPromises,
|
||||
'mongodb-legacy': require('mongodb-legacy'),
|
||||
},
|
||||
globals: { Buffer, JSON, Math, console, process },
|
||||
sourceTransformers: {
|
||||
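// Rewrite require('node:x') to require('x') so the stubs above match either import style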
removeNodePrefix: function (source) {
|
||||
return source.replace(/require\(['"]node:/g, "require('")
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
exports.mochaHooks = {
|
||||
beforeEach() {
|
||||
this.logger = stubs.logger
|
||||
},
|
||||
|
||||
afterEach() {
|
||||
sandbox.reset()
|
||||
},
|
||||
}
|
580
services/docstore/test/unit/js/DocArchiveManagerTests.js
Normal file
580
services/docstore/test/unit/js/DocArchiveManagerTests.js
Normal file
@@ -0,0 +1,580 @@
|
||||
const sinon = require('sinon')
|
||||
const { expect } = require('chai')
|
||||
const modulePath = '../../../app/js/DocArchiveManager.js'
|
||||
const SandboxedModule = require('sandboxed-module')
|
||||
const { ObjectId } = require('mongodb-legacy')
|
||||
const Errors = require('../../../app/js/Errors')
|
||||
const StreamToBuffer = require('../../../app/js/StreamToBuffer').promises
|
||||
|
||||
describe('DocArchiveManager', function () {
|
||||
let DocArchiveManager,
|
||||
PersistorManager,
|
||||
MongoManager,
|
||||
RangeManager,
|
||||
Settings,
|
||||
Crypto,
|
||||
StreamUtils,
|
||||
HashDigest,
|
||||
HashUpdate,
|
||||
archivedDocs,
|
||||
mongoDocs,
|
||||
archivedDoc,
|
||||
archivedDocJson,
|
||||
md5Sum,
|
||||
projectId,
|
||||
readStream,
|
||||
stream,
|
||||
streamToBuffer
|
||||
|
||||
beforeEach(function () {
|
||||
md5Sum = 'decafbad'
|
||||
|
||||
RangeManager = {
|
||||
jsonRangesToMongo: sinon.stub().returns({ mongo: 'ranges' }),
|
||||
}
|
||||
Settings = {
|
||||
docstore: {
|
||||
backend: 'gcs',
|
||||
bucket: 'wombat',
|
||||
},
|
||||
parallelArchiveJobs: 3,
|
||||
}
|
||||
HashDigest = sinon.stub().returns(md5Sum)
|
||||
HashUpdate = sinon.stub().returns({ digest: HashDigest })
|
||||
Crypto = {
|
||||
createHash: sinon.stub().returns({ update: HashUpdate }),
|
||||
}
|
||||
StreamUtils = {
|
||||
ReadableString: sinon.stub().returns({ stream: 'readStream' }),
|
||||
}
|
||||
|
||||
projectId = new ObjectId()
|
||||
archivedDocs = [
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 2,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 4,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 6,
|
||||
},
|
||||
]
|
||||
mongoDocs = [
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['one', 'two', 'three'],
|
||||
rev: 2,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['aaa', 'bbb', 'ccc'],
|
||||
rev: 4,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 6,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
inS3: true,
|
||||
rev: 6,
|
||||
},
|
||||
{
|
||||
_id: new ObjectId(),
|
||||
lines: ['111', '222', '333'],
|
||||
rev: 6,
|
||||
},
|
||||
]
|
||||
|
||||
archivedDoc = {
|
||||
lines: mongoDocs[0].lines,
|
||||
rev: mongoDocs[0].rev,
|
||||
}
|
||||
|
||||
archivedDocJson = JSON.stringify({ ...archivedDoc, schema_v: 1 })
|
||||
|
||||
stream = {
|
||||
on: sinon.stub(),
|
||||
resume: sinon.stub(),
|
||||
}
|
||||
stream.on.withArgs('data').yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
stream.on.withArgs('end').yields()
|
||||
|
||||
readStream = {
|
||||
stream: 'readStream',
|
||||
}
|
||||
|
||||
PersistorManager = {
|
||||
getObjectStream: sinon.stub().resolves(stream),
|
||||
sendStream: sinon.stub().resolves(),
|
||||
getObjectMd5Hash: sinon.stub().resolves(md5Sum),
|
||||
deleteObject: sinon.stub().resolves(),
|
||||
deleteDirectory: sinon.stub().resolves(),
|
||||
}
|
||||
|
||||
const getNonArchivedProjectDocIds = sinon.stub()
|
||||
getNonArchivedProjectDocIds
|
||||
.onCall(0)
|
||||
.resolves(mongoDocs.filter(doc => !doc.inS3).map(doc => doc._id))
|
||||
getNonArchivedProjectDocIds.onCall(1).resolves([])
|
||||
|
||||
const getArchivedProjectDocs = sinon.stub()
|
||||
getArchivedProjectDocs.onCall(0).resolves(archivedDocs)
|
||||
getArchivedProjectDocs.onCall(1).resolves([])
|
||||
|
||||
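// Look up a doc from the in-memory fixtures, throwing NotFoundError like the real MongoManager would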
const fakeGetDoc = async (_projectId, _docId) => {
|
||||
if (_projectId.equals(projectId)) {
|
||||
for (const mongoDoc of mongoDocs.concat(archivedDocs)) {
|
||||
if (mongoDoc._id.equals(_docId)) {
|
||||
return mongoDoc
|
||||
}
|
||||
}
|
||||
}
|
||||
throw new Errors.NotFoundError()
|
||||
}
|
||||
|
||||
MongoManager = {
|
||||
promises: {
|
||||
markDocAsArchived: sinon.stub().resolves(),
|
||||
restoreArchivedDoc: sinon.stub().resolves(),
|
||||
upsertIntoDocCollection: sinon.stub().resolves(),
|
||||
getProjectsDocs: sinon.stub().resolves(mongoDocs),
|
||||
getNonDeletedArchivedProjectDocs: getArchivedProjectDocs,
|
||||
getNonArchivedProjectDocIds,
|
||||
getArchivedProjectDocs,
|
||||
findDoc: sinon.stub().callsFake(fakeGetDoc),
|
||||
getDocForArchiving: sinon.stub().callsFake(fakeGetDoc),
|
||||
destroyProject: sinon.stub().resolves(),
|
||||
},
|
||||
}
|
||||
|
||||
// Wrap streamToBuffer so that we can pass in something that it expects (in
|
||||
// this case, a Promise) rather than a stubbed stream object
|
||||
streamToBuffer = {
|
||||
promises: {
|
||||
streamToBuffer: async () => {
|
||||
const inputStream = new Promise(resolve => {
|
||||
stream.on('data', data => resolve(data))
|
||||
})
|
||||
|
||||
const value = await StreamToBuffer.streamToBuffer(
|
||||
'testProjectId',
|
||||
'testDocId',
|
||||
inputStream
|
||||
)
|
||||
|
||||
return value
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
DocArchiveManager = SandboxedModule.require(modulePath, {
|
||||
requires: {
|
||||
'@overleaf/settings': Settings,
|
||||
crypto: Crypto,
|
||||
'@overleaf/stream-utils': StreamUtils,
|
||||
'./MongoManager': MongoManager,
|
||||
'./RangeManager': RangeManager,
|
||||
'./PersistorManager': PersistorManager,
|
||||
'./Errors': Errors,
|
||||
'./StreamToBuffer': streamToBuffer,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
describe('archiveDoc', function () {
|
||||
it('should resolve when passed a valid document', async function () {
|
||||
await expect(
|
||||
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
).to.eventually.be.fulfilled
|
||||
})
|
||||
|
||||
it('should throw an error if the doc has no lines', async function () {
|
||||
const doc = mongoDocs[0]
|
||||
doc.lines = null
|
||||
|
||||
await expect(
|
||||
DocArchiveManager.promises.archiveDoc(projectId, doc._id)
|
||||
).to.eventually.be.rejectedWith('doc has no lines')
|
||||
})
|
||||
|
||||
it('should add the schema version', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[1]._id)
|
||||
expect(StreamUtils.ReadableString).to.have.been.calledWith(
|
||||
sinon.match(/"schema_v":1/)
|
||||
)
|
||||
})
|
||||
|
||||
it('should calculate the hex md5 sum of the content', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(Crypto.createHash).to.have.been.calledWith('md5')
|
||||
expect(HashUpdate).to.have.been.calledWith(archivedDocJson)
|
||||
expect(HashDigest).to.have.been.calledWith('hex')
|
||||
})
|
||||
|
||||
it('should pass the md5 hash to the object persistor for verification', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
sinon.match.any,
|
||||
sinon.match.any,
|
||||
sinon.match.any,
|
||||
{ sourceMd5: md5Sum }
|
||||
)
|
||||
})
|
||||
|
||||
it('should pass the correct bucket and key to the persistor', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
Settings.docstore.bucket,
|
||||
`${projectId}/${mongoDocs[0]._id}`
|
||||
)
|
||||
})
|
||||
|
||||
it('should create a stream from the encoded json and send it', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(StreamUtils.ReadableString).to.have.been.calledWith(
|
||||
archivedDocJson
|
||||
)
|
||||
expect(PersistorManager.sendStream).to.have.been.calledWith(
|
||||
sinon.match.any,
|
||||
sinon.match.any,
|
||||
readStream
|
||||
)
|
||||
})
|
||||
|
||||
it('should mark the doc as archived', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
|
||||
projectId,
|
||||
mongoDocs[0]._id,
|
||||
mongoDocs[0].rev
|
||||
)
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should bail out early', async function () {
|
||||
await DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
expect(MongoManager.promises.getDocForArchiving).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('with null bytes in the result', function () {
|
||||
const _stringify = JSON.stringify
|
||||
|
||||
beforeEach(function () {
|
||||
JSON.stringify = sinon.stub().returns('{"bad": "\u0000"}')
|
||||
})
|
||||
|
||||
afterEach(function () {
|
||||
JSON.stringify = _stringify
|
||||
})
|
||||
|
||||
it('should return an error', async function () {
|
||||
await expect(
|
||||
DocArchiveManager.promises.archiveDoc(projectId, mongoDocs[0]._id)
|
||||
).to.eventually.be.rejectedWith('null bytes detected')
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('unarchiveDoc', function () {
|
||||
let docId, lines, rev
|
||||
|
||||
describe('when the doc is in S3', function () {
|
||||
beforeEach(function () {
|
||||
docId = mongoDocs[0]._id
|
||||
lines = ['doc', 'lines']
|
||||
rev = 123 // set before stubbing so the resolved doc carries a defined rev
|
||||
MongoManager.promises.findDoc = sinon
|
||||
.stub()
|
||||
.resolves({ inS3: true, rev })
|
||||
})
|
||||
|
||||
it('should resolve when passed a valid document', async function () {
|
||||
await expect(DocArchiveManager.promises.unarchiveDoc(projectId, docId))
|
||||
.to.eventually.be.fulfilled
|
||||
})
|
||||
|
||||
it('should test md5 validity with the raw buffer', async function () {
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(HashUpdate).to.have.been.calledWith(
|
||||
sinon.match.instanceOf(Buffer)
|
||||
)
|
||||
})
|
||||
|
||||
it('should throw an error if the md5 does not match', async function () {
|
||||
PersistorManager.getObjectMd5Hash.resolves('badf00d')
|
||||
await expect(
|
||||
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
).to.eventually.be.rejected.and.be.instanceof(Errors.Md5MismatchError)
|
||||
})
|
||||
|
||||
it('should restore the doc in Mongo', async function () {
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(
|
||||
MongoManager.promises.restoreArchivedDoc
|
||||
).to.have.been.calledWith(projectId, docId, archivedDoc)
|
||||
})
|
||||
|
||||
describe('when archiving is not configured', function () {
|
||||
beforeEach(function () {
|
||||
Settings.docstore.backend = undefined
|
||||
})
|
||||
|
||||
it('should error out on archived doc', async function () {
|
||||
await expect(
|
||||
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
).to.eventually.be.rejected.and.match(
|
||||
/found archived doc, but archiving backend is not configured/
|
||||
)
|
||||
})
|
||||
|
||||
it('should return early on non-archived doc', async function () {
|
||||
MongoManager.promises.findDoc = sinon.stub().resolves({ rev })
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(PersistorManager.getObjectMd5Hash).to.not.have.been.called
|
||||
})
|
||||
})
|
||||
|
||||
describe('doc contents', function () {
|
||||
let archivedDoc
|
||||
|
||||
describe('when the doc has the old schema', function () {
|
||||
beforeEach(function () {
|
||||
archivedDoc = lines
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
.withArgs('data')
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should return the docs lines', async function () {
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(
|
||||
MongoManager.promises.restoreArchivedDoc
|
||||
).to.have.been.calledWith(projectId, docId, { lines, rev })
|
||||
})
|
||||
})
|
||||
|
||||
describe('with the new schema and ranges', function () {
|
||||
beforeEach(function () {
|
||||
archivedDoc = {
|
||||
lines,
|
||||
ranges: { json: 'ranges' },
|
||||
rev: 456,
|
||||
schema_v: 1,
|
||||
}
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
.withArgs('data')
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should return the doc lines and ranges', async function () {
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(
|
||||
MongoManager.promises.restoreArchivedDoc
|
||||
).to.have.been.calledWith(projectId, docId, {
|
||||
lines,
|
||||
ranges: { mongo: 'ranges' },
|
||||
rev: 456,
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('with the new schema and no ranges', function () {
|
||||
beforeEach(function () {
|
||||
archivedDoc = { lines, rev: 456, schema_v: 1 }
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
.withArgs('data')
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should return only the doc lines', async function () {
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(
|
||||
MongoManager.promises.restoreArchivedDoc
|
||||
).to.have.been.calledWith(projectId, docId, { lines, rev: 456 })
|
||||
})
|
||||
})
|
||||
|
||||
describe('with the new schema and no rev', function () {
|
||||
beforeEach(function () {
|
||||
archivedDoc = { lines, schema_v: 1 }
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
.withArgs('data')
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should use the rev obtained from Mongo', async function () {
|
||||
await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
expect(
|
||||
MongoManager.promises.restoreArchivedDoc
|
||||
).to.have.been.calledWith(projectId, docId, { lines, rev })
|
||||
})
|
||||
})
|
||||
|
||||
describe('with an unrecognised schema', function () {
|
||||
beforeEach(function () {
|
||||
archivedDoc = { lines, schema_v: 2 }
|
||||
archivedDocJson = JSON.stringify(archivedDoc)
|
||||
stream.on
|
||||
.withArgs('data')
|
||||
.yields(Buffer.from(archivedDocJson, 'utf8'))
|
||||
})
|
||||
|
||||
it('should throw an error', async function () {
|
||||
await expect(
|
||||
DocArchiveManager.promises.unarchiveDoc(projectId, docId)
|
||||
).to.eventually.be.rejectedWith(
|
||||
"I don't understand the doc format in s3"
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
    it('should not do anything if the file is already unarchived', async function () {
      MongoManager.promises.findDoc.resolves({ inS3: false })
      await DocArchiveManager.promises.unarchiveDoc(projectId, docId)
      expect(PersistorManager.getObjectStream).not.to.have.been.called
    })

    it('should throw an error if the file is not found', async function () {
      PersistorManager.getObjectStream = sinon
        .stub()
        .rejects(new Errors.NotFoundError())
      await expect(
        DocArchiveManager.promises.unarchiveDoc(projectId, docId)
      ).to.eventually.be.rejected.and.be.instanceof(Errors.NotFoundError)
    })
  })

  describe('destroyProject', function () {
    describe('when archiving is enabled', function () {
      beforeEach(async function () {
        await DocArchiveManager.promises.destroyProject(projectId)
      })

      it('should delete the project in Mongo', function () {
        expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
          projectId
        )
      })

      it('should delete the project in the persistor', function () {
        expect(PersistorManager.deleteDirectory).to.have.been.calledWith(
          Settings.docstore.bucket,
          projectId
        )
      })
    })

    describe('when archiving is disabled', function () {
      beforeEach(async function () {
        Settings.docstore.backend = ''
        await DocArchiveManager.promises.destroyProject(projectId)
      })

      it('should delete the project in Mongo', function () {
        expect(MongoManager.promises.destroyProject).to.have.been.calledWith(
          projectId
        )
      })

      it('should not delete the project in the persistor', function () {
        expect(PersistorManager.deleteDirectory).not.to.have.been.called
      })
    })
  })

  describe('archiveAllDocs', function () {
    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.archiveAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should archive all project docs which are not in s3', async function () {
      await DocArchiveManager.promises.archiveAllDocs(projectId)
      // not inS3
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        projectId,
        mongoDocs[0]._id
      )
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        projectId,
        mongoDocs[1]._id
      )
      expect(MongoManager.promises.markDocAsArchived).to.have.been.calledWith(
        projectId,
        mongoDocs[4]._id
      )

      // inS3
      expect(
        MongoManager.promises.markDocAsArchived
      ).not.to.have.been.calledWith(projectId, mongoDocs[2]._id)
      expect(
        MongoManager.promises.markDocAsArchived
      ).not.to.have.been.calledWith(projectId, mongoDocs[3]._id)
    })

    describe('when archiving is not configured', function () {
      beforeEach(function () {
        Settings.docstore.backend = undefined
      })

      it('should bail out early', async function () {
        await DocArchiveManager.promises.archiveAllDocs(projectId)
        expect(MongoManager.promises.getNonArchivedProjectDocIds).to.not.have
          .been.called
      })
    })
  })

  describe('unArchiveAllDocs', function () {
    it('should resolve with valid arguments', async function () {
      await expect(DocArchiveManager.promises.unArchiveAllDocs(projectId)).to
        .eventually.be.fulfilled
    })

    it('should unarchive all inS3 docs', async function () {
      await DocArchiveManager.promises.unArchiveAllDocs(projectId)

      for (const doc of archivedDocs) {
        expect(PersistorManager.getObjectStream).to.have.been.calledWith(
          Settings.docstore.bucket,
          `${projectId}/${doc._id}`
        )
      }
    })

    describe('when archiving is not configured', function () {
      beforeEach(function () {
        Settings.docstore.backend = undefined
      })

      it('should bail out early', async function () {
        await DocArchiveManager.promises.unArchiveAllDocs(projectId)
        expect(MongoManager.promises.getNonDeletedArchivedProjectDocs).to.not
          .have.been.called
      })
    })
  })
})
755
services/docstore/test/unit/js/DocManagerTests.js
Normal file
@@ -0,0 +1,755 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { expect } = require('chai')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/DocManager'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')

describe('DocManager', function () {
  beforeEach(function () {
    this.doc_id = new ObjectId().toString()
    this.project_id = new ObjectId().toString()
    this.another_project_id = new ObjectId().toString()
    this.stubbedError = new Error('blew up')
    this.version = 42

    this.MongoManager = {
      promises: {
        findDoc: sinon.stub(),
        getProjectsDocs: sinon.stub(),
        patchDoc: sinon.stub().resolves(),
        upsertIntoDocCollection: sinon.stub().resolves(),
      },
    }
    this.DocArchiveManager = {
      promises: {
        unarchiveDoc: sinon.stub(),
        unArchiveAllDocs: sinon.stub(),
        archiveDoc: sinon.stub().resolves(),
      },
    }
    this.RangeManager = {
      jsonRangesToMongo(r) {
        return r
      },
      shouldUpdateRanges: sinon.stub().returns(false),
    }
    this.settings = { docstore: {} }

    this.DocManager = SandboxedModule.require(modulePath, {
      requires: {
        './MongoManager': this.MongoManager,
        './DocArchiveManager': this.DocArchiveManager,
        './RangeManager': this.RangeManager,
        '@overleaf/settings': this.settings,
        './Errors': Errors,
      },
    })
  })
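
  // Every collaborator is a plain object of sinon stubs injected via
  // sandboxed-module, so the suites below exercise DocManager's logic in
  // isolation from Mongo and the archive backend.
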
  describe('getFullDoc', function () {
    beforeEach(function () {
      this.DocManager.promises._getDoc = sinon.stub()
      this.doc = {
        _id: this.doc_id,
        lines: ['2134'],
      }
    })

    it('should call get doc with a quick filter', async function () {
      this.DocManager.promises._getDoc.resolves(this.doc)
      const doc = await this.DocManager.promises.getFullDoc(
        this.project_id,
        this.doc_id
      )
      doc.should.equal(this.doc)
      this.DocManager.promises._getDoc
        .calledWith(this.project_id, this.doc_id, {
          lines: true,
          rev: true,
          deleted: true,
          version: true,
          ranges: true,
          inS3: true,
        })
        .should.equal(true)
    })

    it('should return error when get doc errors', async function () {
      this.DocManager.promises._getDoc.rejects(this.stubbedError)
      await expect(
        this.DocManager.promises.getFullDoc(this.project_id, this.doc_id)
      ).to.be.rejectedWith(this.stubbedError)
    })
  })

  describe('getRawDoc', function () {
    beforeEach(function () {
      this.DocManager.promises._getDoc = sinon.stub()
      this.doc = { lines: ['2134'] }
    })

    it('should call get doc with a quick filter', async function () {
      this.DocManager.promises._getDoc.resolves(this.doc)
      const doc = await this.DocManager.promises.getDocLines(
        this.project_id,
        this.doc_id
      )
      doc.should.equal(this.doc)
      this.DocManager.promises._getDoc
        .calledWith(this.project_id, this.doc_id, {
          lines: true,
          inS3: true,
        })
        .should.equal(true)
    })

    it('should return error when get doc errors', async function () {
      this.DocManager.promises._getDoc.rejects(this.stubbedError)
      await expect(
        this.DocManager.promises.getDocLines(this.project_id, this.doc_id)
      ).to.be.rejectedWith(this.stubbedError)
    })
  })

  describe('getDoc', function () {
    beforeEach(function () {
      this.project = { name: 'mock-project' }
      this.doc = {
        _id: this.doc_id,
        project_id: this.project_id,
        lines: ['mock-lines'],
        version: this.version,
      }
    })

    describe('when using a filter', function () {
      beforeEach(function () {
        this.MongoManager.promises.findDoc.resolves(this.doc)
      })

      it('should error if inS3 is not set to true', async function () {
        await expect(
          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
            inS3: false,
          })
        ).to.be.rejected
      })

      it('should always get inS3 even when no filter is passed', async function () {
        await expect(
          this.DocManager.promises._getDoc(this.project_id, this.doc_id)
        ).to.be.rejected
        this.MongoManager.promises.findDoc.called.should.equal(false)
      })

      it('should not error if inS3 is set to true', async function () {
        await this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
          inS3: true,
        })
      })
    })

    describe('when the doc is in the doc collection', function () {
      beforeEach(async function () {
        this.MongoManager.promises.findDoc.resolves(this.doc)
        this.result = await this.DocManager.promises._getDoc(
          this.project_id,
          this.doc_id,
          { version: true, inS3: true }
        )
      })

      it('should get the doc from the doc collection', function () {
        this.MongoManager.promises.findDoc
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should return the doc with the version', function () {
        this.result.lines.should.equal(this.doc.lines)
        this.result.version.should.equal(this.version)
      })
    })

    describe('when MongoManager.findDoc errors', function () {
      it('should return the error', async function () {
        this.MongoManager.promises.findDoc.rejects(this.stubbedError)
        await expect(
          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
            version: true,
            inS3: true,
          })
        ).to.be.rejectedWith(this.stubbedError)
      })
    })

    describe('when the doc is archived', function () {
      beforeEach(async function () {
        this.doc = {
          _id: this.doc_id,
          project_id: this.project_id,
          version: 2,
          inS3: true,
        }
        this.unarchivedDoc = {
          _id: this.doc_id,
          project_id: this.project_id,
          lines: ['mock-lines'],
          version: 2,
          inS3: false,
        }
        this.MongoManager.promises.findDoc.resolves(this.doc)
        this.DocArchiveManager.promises.unarchiveDoc.callsFake(
          async (projectId, docId) => {
            this.MongoManager.promises.findDoc.resolves({
              ...this.unarchivedDoc,
            })
          }
        )
        this.result = await this.DocManager.promises._getDoc(
          this.project_id,
          this.doc_id,
          {
            version: true,
            inS3: true,
          }
        )
      })

      it('should call the DocArchive to unarchive the doc', function () {
        this.DocArchiveManager.promises.unarchiveDoc
          .calledWith(this.project_id, this.doc_id)
          .should.equal(true)
      })

      it('should look up the doc twice', function () {
        this.MongoManager.promises.findDoc.calledTwice.should.equal(true)
      })

      it('should return the doc', function () {
        expect(this.result).to.deep.equal({
          ...this.unarchivedDoc,
        })
      })
    })
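
    // The flow above is the read-through behaviour on archived docs: the
    // first findDoc sees inS3 set, DocArchiveManager restores the doc into
    // Mongo, and _getDoc re-reads it before returning.
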
    describe('when the doc does not exist in the docs collection', function () {
      it('should return a NotFoundError', async function () {
        this.MongoManager.promises.findDoc.resolves(null)
        await expect(
          this.DocManager.promises._getDoc(this.project_id, this.doc_id, {
            version: true,
            inS3: true,
          })
        ).to.be.rejectedWith(
          `No such doc: ${this.doc_id} in project ${this.project_id}`
        )
      })
    })
  })

  describe('getAllNonDeletedDocs', function () {
    describe('when the project exists', function () {
      beforeEach(async function () {
        this.docs = [
          {
            _id: this.doc_id,
            project_id: this.project_id,
            lines: ['mock-lines'],
          },
        ]
        this.MongoManager.promises.getProjectsDocs.resolves(this.docs)
        this.DocArchiveManager.promises.unArchiveAllDocs.resolves(this.docs)
        this.filter = { lines: true }
        this.result = await this.DocManager.promises.getAllNonDeletedDocs(
          this.project_id,
          this.filter
        )
      })

      it('should get the project from the database', function () {
        this.MongoManager.promises.getProjectsDocs.should.have.been.calledWith(
          this.project_id,
          { include_deleted: false },
          this.filter
        )
      })

      it('should return the docs', function () {
        expect(this.result).to.deep.equal(this.docs)
      })
    })

    describe('when there are no docs for the project', function () {
      it('should return a NotFoundError', async function () {
        this.MongoManager.promises.getProjectsDocs.resolves(null)
        this.DocArchiveManager.promises.unArchiveAllDocs.resolves(null)
        await expect(
          this.DocManager.promises.getAllNonDeletedDocs(
            this.project_id,
            this.filter
          )
        ).to.be.rejectedWith(`No docs for project ${this.project_id}`)
      })
    })
  })

  describe('patchDoc', function () {
    describe('when the doc exists', function () {
      beforeEach(function () {
        this.lines = ['mock', 'doc', 'lines']
        this.rev = 77
        this.MongoManager.promises.findDoc.resolves({
          _id: new ObjectId(this.doc_id),
        })
        this.meta = {}
      })

      describe('standard path', function () {
        beforeEach(async function () {
          await this.DocManager.promises.patchDoc(
            this.project_id,
            this.doc_id,
            this.meta
          )
        })

        it('should get the doc', function () {
          expect(this.MongoManager.promises.findDoc).to.have.been.calledWith(
            this.project_id,
            this.doc_id
          )
        })

        it('should persist the meta', function () {
          expect(this.MongoManager.promises.patchDoc).to.have.been.calledWith(
            this.project_id,
            this.doc_id,
            this.meta
          )
        })
      })

      describe('background flush disabled and deleting a doc', function () {
        beforeEach(async function () {
          this.settings.docstore.archiveOnSoftDelete = false
          this.meta.deleted = true

          await this.DocManager.promises.patchDoc(
            this.project_id,
            this.doc_id,
            this.meta
          )
        })

        it('should not flush the doc out of mongo', function () {
          expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
            .called
        })
      })

      describe('background flush enabled and not deleting a doc', function () {
        beforeEach(async function () {
          this.settings.docstore.archiveOnSoftDelete = true
          this.meta.deleted = false
          await this.DocManager.promises.patchDoc(
            this.project_id,
            this.doc_id,
            this.meta
          )
        })

        it('should not flush the doc out of mongo', function () {
          expect(this.DocArchiveManager.promises.archiveDoc).to.not.have.been
            .called
        })
      })

      describe('background flush enabled and deleting a doc', function () {
        beforeEach(function () {
          this.settings.docstore.archiveOnSoftDelete = true
          this.meta.deleted = true
        })

        describe('when the background flush succeeds', function () {
          beforeEach(async function () {
            await this.DocManager.promises.patchDoc(
              this.project_id,
              this.doc_id,
              this.meta
            )
          })

          it('should not log a warning', function () {
            expect(this.logger.warn).to.not.have.been.called
          })

          it('should flush the doc out of mongo', function () {
            expect(
              this.DocArchiveManager.promises.archiveDoc
            ).to.have.been.calledWith(this.project_id, this.doc_id)
          })
        })

        describe('when the background flush fails', function () {
          beforeEach(async function () {
            this.err = new Error('foo')
            this.DocArchiveManager.promises.archiveDoc.rejects(this.err)
            await this.DocManager.promises.patchDoc(
              this.project_id,
              this.doc_id,
              this.meta
            )
          })

          it('should log a warning', function () {
            expect(this.logger.warn).to.have.been.calledWith(
              sinon.match({
                projectId: this.project_id,
                docId: this.doc_id,
                err: this.err,
              }),
              'archiving a single doc in the background failed'
            )
          })
        })
      })
    })

    describe('when the doc does not exist', function () {
      it('should return a NotFoundError', async function () {
        this.MongoManager.promises.findDoc.resolves(null)
        await expect(
          this.DocManager.promises.patchDoc(this.project_id, this.doc_id, {})
        ).to.be.rejectedWith(
          `No such project/doc to delete: ${this.project_id}/${this.doc_id}`
        )
      })
    })
  })
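
  // Note the failure handling above: when the background archive of a
  // soft-deleted doc fails, patchDoc still resolves and only a warning is
  // logged, since the patch itself has already been applied by that point.
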
  describe('updateDoc', function () {
    beforeEach(function () {
      this.oldDocLines = ['old', 'doc', 'lines']
      this.newDocLines = ['new', 'doc', 'lines']
      this.originalRanges = {
        changes: [
          {
            id: new ObjectId().toString(),
            op: { i: 'foo', p: 3 },
            meta: {
              user_id: new ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      this.newRanges = {
        changes: [
          {
            id: new ObjectId().toString(),
            op: { i: 'bar', p: 6 },
            meta: {
              user_id: new ObjectId().toString(),
              ts: new Date().toString(),
            },
          },
        ],
      }
      this.version = 42
      this.doc = {
        _id: this.doc_id,
        project_id: this.project_id,
        lines: this.oldDocLines,
        rev: (this.rev = 5),
        version: this.version,
        ranges: this.originalRanges,
      }

      this.DocManager.promises._getDoc = sinon.stub()
    })

    describe('when only the doc lines have changed', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges
        )
      })

      it('should get the existing doc', function () {
        this.DocManager.promises._getDoc
          .calledWith(this.project_id, this.doc_id, {
            version: true,
            rev: true,
            lines: true,
            ranges: true,
            inS3: true,
          })
          .should.equal(true)
      })

      it('should upsert the document to the doc collection', function () {
        this.MongoManager.promises.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, this.rev, {
            lines: this.newDocLines,
          })
          .should.equal(true)
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
      })
    })

    describe('when the doc ranges have changed', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        this.RangeManager.shouldUpdateRanges.returns(true)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version,
          this.newRanges
        )
      })

      it('should upsert the ranges', function () {
        this.MongoManager.promises.upsertIntoDocCollection
          .calledWith(this.project_id, this.doc_id, this.rev, {
            ranges: this.newRanges,
          })
          .should.equal(true)
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
      })
    })

    describe('when only the version has changed', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version + 1,
          this.originalRanges
        )
      })

      it('should update the version', function () {
        this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
          this.project_id,
          this.doc_id,
          this.rev,
          { version: this.version + 1 }
        )
      })

      it('should return the old rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev })
      })
    })

    describe('when the doc has not changed at all', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines,
          this.version,
          this.originalRanges
        )
      })

      it('should not update the ranges or lines or version', function () {
        this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
          false
        )
      })

      it('should return the old rev and modified == false', function () {
        expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
      })
    })

    describe('when the version is null', function () {
      it('should return an error', async function () {
        await expect(
          this.DocManager.promises.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            null,
            this.originalRanges
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when the lines are null', function () {
      it('should return an error', async function () {
        await expect(
          this.DocManager.promises.updateDoc(
            this.project_id,
            this.doc_id,
            null,
            this.version,
            this.originalRanges
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when the ranges are null', function () {
      it('should return an error', async function () {
        await expect(
          this.DocManager.promises.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            this.version,
            null
          )
        ).to.be.rejectedWith('no lines, version or ranges provided')
      })
    })

    describe('when there is a generic error getting the doc', function () {
      beforeEach(async function () {
        this.error = new Error('doc could not be found')
        this.DocManager.promises._getDoc = sinon.stub().rejects(this.error)
        await expect(
          this.DocManager.promises.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            this.version,
            this.originalRanges
          )
        ).to.be.rejectedWith(this.error)
      })

      it('should not upsert the document to the doc collection', function () {
        this.MongoManager.promises.upsertIntoDocCollection.should.not.have.been
          .called
      })
    })

    describe('when the version was decremented', function () {
      it('should return an error', async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        await expect(
          this.DocManager.promises.updateDoc(
            this.project_id,
            this.doc_id,
            this.newDocLines,
            this.version - 1,
            this.originalRanges
          )
        ).to.be.rejectedWith(Errors.DocVersionDecrementedError)
      })
    })

    describe('when the doc lines have not changed', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.oldDocLines.slice(),
          this.version,
          this.originalRanges
        )
      })

      it('should not update the doc', function () {
        this.MongoManager.promises.upsertIntoDocCollection.called.should.equal(
          false
        )
      })

      it('should return the existing rev', function () {
        expect(this.result).to.deep.equal({ modified: false, rev: this.rev })
      })
    })

    describe('when the doc does not exist', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(null)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version,
          this.originalRanges
        )
      })

      it('should upsert the document to the doc collection', function () {
        this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
          this.project_id,
          this.doc_id,
          undefined,
          {
            lines: this.newDocLines,
            ranges: this.originalRanges,
            version: this.version,
          }
        )
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: 1 })
      })
    })
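
    // In the race below, the first upsert loses to a concurrent writer and
    // rejects with DocRevValueError; updateDoc then re-reads the doc and
    // retries the upsert, which is why it ends up being called twice.
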
    describe('when another update is racing', function () {
      beforeEach(async function () {
        this.DocManager.promises._getDoc = sinon.stub().resolves(this.doc)
        this.MongoManager.promises.upsertIntoDocCollection
          .onFirstCall()
          .rejects(new Errors.DocRevValueError())
        this.RangeManager.shouldUpdateRanges.returns(true)
        this.result = await this.DocManager.promises.updateDoc(
          this.project_id,
          this.doc_id,
          this.newDocLines,
          this.version + 1,
          this.newRanges
        )
      })

      it('should upsert the doc twice', function () {
        this.MongoManager.promises.upsertIntoDocCollection.should.have.been.calledWith(
          this.project_id,
          this.doc_id,
          this.rev,
          {
            ranges: this.newRanges,
            lines: this.newDocLines,
            version: this.version + 1,
          }
        )
        this.MongoManager.promises.upsertIntoDocCollection.should.have.been
          .calledTwice
      })

      it('should return the new rev', function () {
        expect(this.result).to.deep.equal({ modified: true, rev: this.rev + 1 })
      })
    })
  })
})
578
services/docstore/test/unit/js/HttpControllerTests.js
Normal file
@@ -0,0 +1,578 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/HttpController'
)
const { ObjectId } = require('mongodb-legacy')
const Errors = require('../../../app/js/Errors')

describe('HttpController', function () {
  beforeEach(function () {
    const settings = {
      max_doc_length: 2 * 1024 * 1024,
    }
    this.DocArchiveManager = {
      unArchiveAllDocs: sinon.stub().yields(),
    }
    this.DocManager = {}
    this.HttpController = SandboxedModule.require(modulePath, {
      requires: {
        './DocManager': this.DocManager,
        './DocArchiveManager': this.DocArchiveManager,
        '@overleaf/settings': settings,
        './HealthChecker': {},
        './Errors': Errors,
      },
    })
    this.res = {
      send: sinon.stub(),
      sendStatus: sinon.stub(),
      json: sinon.stub(),
      setHeader: sinon.stub(),
    }
    this.res.status = sinon.stub().returns(this.res)
    this.req = { query: {} }
    this.next = sinon.stub()
    this.projectId = 'mock-project-id'
    this.docId = 'mock-doc-id'
    this.doc = {
      _id: this.docId,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
    this.deletedDoc = {
      deleted: true,
      _id: this.docId,
      lines: ['mock', 'lines', ' here', '', '', ' spaces '],
      version: 42,
      rev: 5,
    }
  })

  describe('getDoc', function () {
    describe('without deleted docs', function () {
      beforeEach(function () {
        this.req.params = {
          project_id: this.projectId,
          doc_id: this.docId,
        }
        this.DocManager.getFullDoc = sinon
          .stub()
          .callsArgWith(2, null, this.doc)
        this.HttpController.getDoc(this.req, this.res, this.next)
      })

      it('should get the document with the version (including deleted)', function () {
        this.DocManager.getFullDoc
          .calledWith(this.projectId, this.docId)
          .should.equal(true)
      })

      it('should return the doc as JSON', function () {
        this.res.json
          .calledWith({
            _id: this.docId,
            lines: this.doc.lines,
            rev: this.doc.rev,
            version: this.doc.version,
          })
          .should.equal(true)
      })
    })

    describe('which is deleted', function () {
      beforeEach(function () {
        this.req.params = {
          project_id: this.projectId,
          doc_id: this.docId,
        }
        this.DocManager.getFullDoc = sinon
          .stub()
          .callsArgWith(2, null, this.deletedDoc)
      })

      it('should get the doc from the doc manager', function () {
        this.HttpController.getDoc(this.req, this.res, this.next)
        this.DocManager.getFullDoc
          .calledWith(this.projectId, this.docId)
          .should.equal(true)
      })

      it('should return 404 if the query string delete is not set', function () {
        this.HttpController.getDoc(this.req, this.res, this.next)
        this.res.sendStatus.calledWith(404).should.equal(true)
      })

      it('should return the doc as JSON if include_deleted is set to true', function () {
        this.req.query.include_deleted = 'true'
        this.HttpController.getDoc(this.req, this.res, this.next)
        this.res.json
          .calledWith({
            _id: this.docId,
            lines: this.doc.lines,
            rev: this.doc.rev,
            deleted: true,
            version: this.doc.version,
          })
          .should.equal(true)
      })
    })
  })
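
  // getDoc hides soft-deleted docs by default; callers have to opt in with
  // the include_deleted=true query parameter to receive them.
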
  describe('getRawDoc', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
      this.DocManager.getDocLines = sinon.stub().callsArgWith(2, null, this.doc)
      this.HttpController.getRawDoc(this.req, this.res, this.next)
    })

    it('should get the document without the version', function () {
      this.DocManager.getDocLines
        .calledWith(this.projectId, this.docId)
        .should.equal(true)
    })

    it('should set the content type header', function () {
      this.res.setHeader
        .calledWith('content-type', 'text/plain')
        .should.equal(true)
    })

    it('should send the raw version of the doc', function () {
      assert.deepEqual(
        this.res.send.args[0][0],
        `${this.doc.lines[0]}\n${this.doc.lines[1]}\n${this.doc.lines[2]}\n${this.doc.lines[3]}\n${this.doc.lines[4]}\n${this.doc.lines[5]}`
      )
    })
  })

  describe('getAllDocs', function () {
    describe('normally', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should get all the (non-deleted) docs', function () {
        this.DocManager.getAllNonDeletedDocs
          .calledWith(this.projectId, { lines: true, rev: true })
          .should.equal(true)
      })

      it('should return the doc as JSON', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: this.docs[0].lines,
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[1]._id.toString(),
              lines: this.docs[1].lines,
              rev: this.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    describe('with null lines', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            lines: null,
            rev: 2,
          },
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should return the doc with fallback lines', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: [],
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[1]._id.toString(),
              lines: this.docs[1].lines,
              rev: this.docs[1].rev,
            },
          ])
          .should.equal(true)
      })
    })

    describe('with a null doc', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'one'],
            rev: 2,
          },
          null,
          {
            _id: new ObjectId(),
            lines: ['mock', 'lines', 'two'],
            rev: 4,
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        this.HttpController.getAllDocs(this.req, this.res, this.next)
      })

      it('should return the non null docs as JSON', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              lines: this.docs[0].lines,
              rev: this.docs[0].rev,
            },
            {
              _id: this.docs[2]._id.toString(),
              lines: this.docs[2].lines,
              rev: this.docs[2].rev,
            },
          ])
          .should.equal(true)
      })

      it('should log out an error', function () {
        this.logger.error
          .calledWith(
            {
              err: sinon.match.has('message', 'null doc'),
              projectId: this.projectId,
            },
            'encountered null doc'
          )
          .should.equal(true)
      })
    })
  })

  describe('getAllRanges', function () {
    describe('normally', function () {
      beforeEach(function () {
        this.req.params = { project_id: this.projectId }
        this.docs = [
          {
            _id: new ObjectId(),
            ranges: { mock_ranges: 'one' },
          },
          {
            _id: new ObjectId(),
            ranges: { mock_ranges: 'two' },
          },
        ]
        this.DocManager.getAllNonDeletedDocs = sinon
          .stub()
          .callsArgWith(2, null, this.docs)
        this.HttpController.getAllRanges(this.req, this.res, this.next)
      })

      it('should get all the (non-deleted) doc ranges', function () {
        this.DocManager.getAllNonDeletedDocs
          .calledWith(this.projectId, { ranges: true })
          .should.equal(true)
      })

      it('should return the doc as JSON', function () {
        this.res.json
          .calledWith([
            {
              _id: this.docs[0]._id.toString(),
              ranges: this.docs[0].ranges,
            },
            {
              _id: this.docs[1]._id.toString(),
              ranges: this.docs[1].ranges,
            },
          ])
          .should.equal(true)
      })
    })
  })
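
  // updateDoc validates the payload before touching DocManager: requests
  // missing lines, version or ranges get a 400, and any body over
  // max_doc_length gets a 413.
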
  describe('updateDoc', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
    })

    describe('when the doc lines exist and were updated', function () {
      beforeEach(function () {
        this.req.body = {
          lines: (this.lines = ['hello', 'world']),
          version: (this.version = 42),
          ranges: (this.ranges = { changes: 'mock' }),
        }
        this.DocManager.updateDoc = sinon
          .stub()
          .yields(null, true, (this.rev = 5))
        this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should update the document', function () {
        this.DocManager.updateDoc
          .calledWith(
            this.projectId,
            this.docId,
            this.lines,
            this.version,
            this.ranges
          )
          .should.equal(true)
      })

      it('should return a modified status', function () {
        this.res.json
          .calledWith({ modified: true, rev: this.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines exist and were not updated', function () {
      beforeEach(function () {
        this.req.body = {
          lines: (this.lines = ['hello', 'world']),
          version: (this.version = 42),
          ranges: {},
        }
        this.DocManager.updateDoc = sinon
          .stub()
          .yields(null, false, (this.rev = 5))
        this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should return a modified status', function () {
        this.res.json
          .calledWith({ modified: false, rev: this.rev })
          .should.equal(true)
      })
    })

    describe('when the doc lines are not provided', function () {
      beforeEach(function () {
        this.req.body = { version: 42, ranges: {} }
        this.DocManager.updateDoc = sinon.stub().yields(null, false)
        this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', function () {
        this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc version is not provided', function () {
      beforeEach(function () {
        this.req.body = { lines: ['hello world'], ranges: {} }
        this.DocManager.updateDoc = sinon.stub().yields(null, false)
        this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', function () {
        this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc ranges are not provided', function () {
      beforeEach(function () {
        this.req.body = { lines: ['foo'], version: 42 }
        this.DocManager.updateDoc = sinon.stub().yields(null, false)
        this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should not update the document', function () {
        this.DocManager.updateDoc.called.should.equal(false)
      })

      it('should return a 400 (bad request) response', function () {
        this.res.sendStatus.calledWith(400).should.equal(true)
      })
    })

    describe('when the doc body is too large', function () {
      beforeEach(function () {
        this.req.body = {
          lines: (this.lines = Array(2049).fill('a'.repeat(1024))),
          version: (this.version = 42),
          ranges: (this.ranges = { changes: 'mock' }),
        }
        this.HttpController.updateDoc(this.req, this.res, this.next)
      })

      it('should return a 413 (too large) response', function () {
        sinon.assert.calledWith(this.res.status, 413)
      })

      it('should report that the document body is too large', function () {
        sinon.assert.calledWith(this.res.send, 'document body too large')
      })
    })
  })

  describe('patchDoc', function () {
    beforeEach(function () {
      this.req.params = {
        project_id: this.projectId,
        doc_id: this.docId,
      }
      this.req.body = { name: 'foo.tex' }
      this.DocManager.patchDoc = sinon.stub().yields(null)
      this.HttpController.patchDoc(this.req, this.res, this.next)
    })

    it('should delete the document', function () {
      expect(this.DocManager.patchDoc).to.have.been.calledWith(
        this.projectId,
        this.docId
      )
    })

    it('should return a 204 (No Content)', function () {
      expect(this.res.sendStatus).to.have.been.calledWith(204)
    })

    describe('with an invalid payload', function () {
      beforeEach(function () {
        this.req.body = { cannot: 'happen' }

        this.DocManager.patchDoc = sinon.stub().yields(null)
        this.HttpController.patchDoc(this.req, this.res, this.next)
      })

      it('should log a message', function () {
        expect(this.logger.fatal).to.have.been.calledWith(
          { field: 'cannot' },
          'joi validation for pathDoc is broken'
        )
      })

      it('should not pass the invalid field along', function () {
        expect(this.DocManager.patchDoc).to.have.been.calledWith(
          this.projectId,
          this.docId,
          {}
        )
      })
    })
  })
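
  // Unknown fields in a patch payload are stripped and logged rather than
  // passed through, so a broken validator cannot corrupt stored docs.
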
  describe('archiveAllDocs', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
      this.DocArchiveManager.archiveAllDocs = sinon.stub().callsArg(1)
      this.HttpController.archiveAllDocs(this.req, this.res, this.next)
    })

    it('should archive the project', function () {
      this.DocArchiveManager.archiveAllDocs
        .calledWith(this.projectId)
        .should.equal(true)
    })

    it('should return a 204 (No Content)', function () {
      this.res.sendStatus.calledWith(204).should.equal(true)
    })
  })

  describe('unArchiveAllDocs', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
    })

    describe('on success', function () {
      beforeEach(function (done) {
        this.res.sendStatus.callsFake(() => done())
        this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
      })

      it('returns a 200', function () {
        expect(this.res.sendStatus).to.have.been.calledWith(200)
      })
    })

    describe("when the archived rev doesn't match", function () {
      beforeEach(function (done) {
        this.res.sendStatus.callsFake(() => done())
        this.DocArchiveManager.unArchiveAllDocs.yields(
          new Errors.DocRevValueError('bad rev')
        )
        this.HttpController.unArchiveAllDocs(this.req, this.res, this.next)
      })

      it('returns a 409', function () {
        expect(this.res.sendStatus).to.have.been.calledWith(409)
      })
    })
  })

  describe('destroyProject', function () {
    beforeEach(function () {
      this.req.params = { project_id: this.projectId }
      this.DocArchiveManager.destroyProject = sinon.stub().callsArg(1)
      this.HttpController.destroyProject(this.req, this.res, this.next)
    })

    it('should destroy the docs', function () {
      sinon.assert.calledWith(
        this.DocArchiveManager.destroyProject,
        this.projectId
      )
    })

    it('should return 204', function () {
      sinon.assert.calledWith(this.res.sendStatus, 204)
    })
  })
})
407
services/docstore/test/unit/js/MongoManagerTests.js
Normal file
@@ -0,0 +1,407 @@
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/MongoManager'
)
const { ObjectId } = require('mongodb-legacy')
const { assert, expect } = require('chai')
const Errors = require('../../../app/js/Errors')

describe('MongoManager', function () {
  beforeEach(function () {
    this.db = {
      docs: {
        updateOne: sinon.stub().resolves({ matchedCount: 1 }),
        insertOne: sinon.stub().resolves(),
      },
    }
    this.MongoManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          db: this.db,
          ObjectId,
        },
        '@overleaf/settings': {
          max_deleted_docs: 42,
          docstore: { archivingLockDurationMs: 5000 },
        },
        './Errors': Errors,
      },
    })
    this.projectId = new ObjectId().toString()
    this.docId = new ObjectId().toString()
    this.rev = 42
    this.stubbedErr = new Error('hello world')
    this.lines = ['Three French hens', 'Two turtle doves']
  })
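
  // db.docs is a plain object of sinon stubs, so every test below asserts
  // the exact query and update documents sent to Mongo rather than hitting
  // a real collection.
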
  describe('findDoc', function () {
    beforeEach(async function () {
      this.doc = { name: 'mock-doc' }
      this.db.docs.findOne = sinon.stub().resolves(this.doc)
      this.filter = { lines: true }
      this.result = await this.MongoManager.promises.findDoc(
        this.projectId,
        this.docId,
        this.filter
      )
    })

    it('should find the doc', function () {
      this.db.docs.findOne
        .calledWith(
          {
            _id: new ObjectId(this.docId),
            project_id: new ObjectId(this.projectId),
          },
          {
            projection: this.filter,
          }
        )
        .should.equal(true)
    })

    it('should return the doc', function () {
      expect(this.result).to.deep.equal(this.doc)
    })
  })

  describe('patchDoc', function () {
    beforeEach(async function () {
      this.meta = { name: 'foo.tex' }
      await this.MongoManager.promises.patchDoc(
        this.projectId,
        this.docId,
        this.meta
      )
    })

    it('should pass the parameter along', function () {
      this.db.docs.updateOne.should.have.been.calledWith(
        {
          _id: new ObjectId(this.docId),
          project_id: new ObjectId(this.projectId),
        },
        {
          $set: this.meta,
        }
      )
    })
  })

  describe('getProjectsDocs', function () {
    beforeEach(function () {
      this.filter = { lines: true }
      this.doc1 = { name: 'mock-doc1' }
      this.doc2 = { name: 'mock-doc2' }
      this.doc3 = { name: 'mock-doc3' }
      this.doc4 = { name: 'mock-doc4' }
      this.db.docs.find = sinon.stub().returns({
        toArray: sinon.stub().resolves([this.doc1, this.doc3, this.doc4]),
      })
    })

    describe('with included_deleted = false', function () {
      beforeEach(async function () {
        this.result = await this.MongoManager.promises.getProjectsDocs(
          this.projectId,
          { include_deleted: false },
          this.filter
        )
      })

      it('should find the non-deleted docs via the project_id', function () {
        this.db.docs.find
          .calledWith(
            {
              project_id: new ObjectId(this.projectId),
              deleted: { $ne: true },
            },
            {
              projection: this.filter,
            }
          )
          .should.equal(true)
      })

      it('should return the docs', function () {
        expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
      })
    })

    describe('with included_deleted = true', function () {
      beforeEach(async function () {
        this.result = await this.MongoManager.promises.getProjectsDocs(
          this.projectId,
          { include_deleted: true },
          this.filter
        )
      })

      it('should find all via the project_id', function () {
        this.db.docs.find
          .calledWith(
            {
              project_id: new ObjectId(this.projectId),
            },
            {
              projection: this.filter,
            }
          )
          .should.equal(true)
      })

      it('should return the docs', function () {
        expect(this.result).to.deep.equal([this.doc1, this.doc3, this.doc4])
      })
    })
  })

  describe('getProjectsDeletedDocs', function () {
    beforeEach(async function () {
      this.filter = { name: true }
      this.doc1 = { _id: '1', name: 'mock-doc1.tex' }
      this.doc2 = { _id: '2', name: 'mock-doc2.tex' }
      this.doc3 = { _id: '3', name: 'mock-doc3.tex' }
      this.db.docs.find = sinon.stub().returns({
        toArray: sinon.stub().resolves([this.doc1, this.doc2, this.doc3]),
      })
      this.result = await this.MongoManager.promises.getProjectsDeletedDocs(
        this.projectId,
        this.filter
      )
    })

    it('should find the deleted docs via the project_id', function () {
      this.db.docs.find
        .calledWith({
          project_id: new ObjectId(this.projectId),
          deleted: true,
        })
        .should.equal(true)
    })

    it('should filter, sort by deletedAt and limit', function () {
      this.db.docs.find
        .calledWith(sinon.match.any, {
          projection: this.filter,
          sort: { deletedAt: -1 },
          limit: 42,
        })
        .should.equal(true)
    })

    it('should return the docs', function () {
      expect(this.result).to.deep.equal([this.doc1, this.doc2, this.doc3])
    })
  })

  describe('upsertIntoDocCollection', function () {
    beforeEach(function () {
      this.oldRev = 77
    })

    it('should upsert the document', async function () {
      await this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        this.oldRev,
        { lines: this.lines }
      )

      const args = this.db.docs.updateOne.args[0]
      assert.deepEqual(args[0], {
        _id: new ObjectId(this.docId),
        project_id: new ObjectId(this.projectId),
        rev: this.oldRev,
      })
      assert.equal(args[1].$set.lines, this.lines)
      assert.equal(args[1].$inc.rev, 1)
    })

    it('should handle update error', async function () {
      this.db.docs.updateOne.rejects(this.stubbedErr)
      await expect(
        this.MongoManager.promises.upsertIntoDocCollection(
          this.projectId,
          this.docId,
          this.rev,
          {
            lines: this.lines,
          }
        )
      ).to.be.rejectedWith(this.stubbedErr)
    })

    it('should insert without a previous rev', async function () {
      await this.MongoManager.promises.upsertIntoDocCollection(
        this.projectId,
        this.docId,
        null,
        { lines: this.lines, ranges: this.ranges }
      )

      expect(this.db.docs.insertOne).to.have.been.calledWith({
        _id: new ObjectId(this.docId),
        project_id: new ObjectId(this.projectId),
        rev: 1,
        lines: this.lines,
        ranges: this.ranges,
      })
    })

    it('should handle generic insert error', async function () {
      this.db.docs.insertOne.rejects(this.stubbedErr)
      await expect(
        this.MongoManager.promises.upsertIntoDocCollection(
          this.projectId,
          this.docId,
          null,
          { lines: this.lines, ranges: this.ranges }
        )
      ).to.be.rejectedWith(this.stubbedErr)
    })

    it('should handle duplicate insert error', async function () {
      this.db.docs.insertOne.rejects({ code: 11000 })
      await expect(
        this.MongoManager.promises.upsertIntoDocCollection(
          this.projectId,
          this.docId,
          null,
          { lines: this.lines, ranges: this.ranges }
        )
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })
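
  // The rev in the update filter provides optimistic concurrency control:
  // an out-of-date rev matches no document, and a duplicate insert (Mongo
  // error code 11000) surfaces as DocRevValueError rather than a silent
  // overwrite.
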
  describe('destroyProject', function () {
    beforeEach(async function () {
      this.projectId = new ObjectId()
      this.db.docs.deleteMany = sinon.stub().resolves()
      await this.MongoManager.promises.destroyProject(this.projectId)
    })

    it('should destroy all docs', function () {
      sinon.assert.calledWith(this.db.docs.deleteMany, {
        project_id: this.projectId,
      })
    })
  })

  describe('checkRevUnchanged', function () {
    beforeEach(function () {
      this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: 1 }
    })

    it('should not error when the rev has not changed', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: 1 })
      await this.MongoManager.promises.checkRevUnchanged(this.doc)
    })

    it('should return an error when the rev has changed', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
      await expect(
        this.MongoManager.promises.checkRevUnchanged(this.doc)
      ).to.be.rejectedWith(Errors.DocModifiedError)
    })

    it('should return a value error if incoming rev is NaN', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: 2 })
      this.doc = { _id: new ObjectId(), name: 'mock-doc', rev: NaN }
      await expect(
        this.MongoManager.promises.checkRevUnchanged(this.doc)
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })

    it('should return a value error if checked doc rev is NaN', async function () {
      this.db.docs.findOne = sinon.stub().resolves({ rev: NaN })
      await expect(
        this.MongoManager.promises.checkRevUnchanged(this.doc)
      ).to.be.rejectedWith(Errors.DocRevValueError)
    })
  })

  describe('restoreArchivedDoc', function () {
    beforeEach(function () {
      this.archivedDoc = {
        lines: ['a', 'b', 'c'],
        ranges: { some: 'ranges' },
        rev: 2,
      }
    })

    describe('complete doc', function () {
      beforeEach(async function () {
        await this.MongoManager.promises.restoreArchivedDoc(
          this.projectId,
          this.docId,
          this.archivedDoc
        )
      })

      it('updates Mongo', function () {
        expect(this.db.docs.updateOne).to.have.been.calledWith(
          {
            _id: new ObjectId(this.docId),
            project_id: new ObjectId(this.projectId),
            rev: this.archivedDoc.rev,
          },
          {
            $set: {
              lines: this.archivedDoc.lines,
              ranges: this.archivedDoc.ranges,
            },
            $unset: {
              inS3: true,
            },
          }
        )
      })
    })

    describe('without ranges', function () {
      beforeEach(async function () {
        delete this.archivedDoc.ranges
        await this.MongoManager.promises.restoreArchivedDoc(
          this.projectId,
          this.docId,
          this.archivedDoc
        )
      })

      it('sets ranges to an empty object', function () {
        expect(this.db.docs.updateOne).to.have.been.calledWith(
          {
            _id: new ObjectId(this.docId),
            project_id: new ObjectId(this.projectId),
            rev: this.archivedDoc.rev,
          },
          {
            $set: {
              lines: this.archivedDoc.lines,
              ranges: {},
            },
            $unset: {
              inS3: true,
            },
          }
        )
      })
    })

    describe("when the update doesn't succeed", function () {
      it('throws a DocRevValueError', async function () {
        this.db.docs.updateOne.resolves({ matchedCount: 0 })
        await expect(
          this.MongoManager.promises.restoreArchivedDoc(
            this.projectId,
            this.docId,
            this.archivedDoc
          )
        ).to.be.rejectedWith(Errors.DocRevValueError)
      })
    })
  })
})
55
services/docstore/test/unit/js/PersistorManagerTests.js
Normal file
@@ -0,0 +1,55 @@
const { expect } = require('chai')
const modulePath = '../../../app/js/PersistorManager.js'
const SandboxedModule = require('sandboxed-module')

describe('PersistorManager', function () {
  class FakePersistor {
    async sendStream() {
      return 'sent'
    }
  }
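
  // The @overleaf/object-persistor factory is replaced with one that returns
  // this fake, so the tests only have to check which persistor object the
  // module hands back for a given Settings.docstore.backend.
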
|
||||
|
||||
  describe('configured', function () {
    it('should return fake persistor', async function () {
      const Settings = {
        docstore: {
          backend: 'gcs',
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })

      expect(PersistorManager).to.be.instanceof(FakePersistor)
      await expect(PersistorManager.sendStream()).to.eventually.equal('sent')
    })
  })
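
  // Without a configured backend the module is expected to fall back to the
  // AbstractPersistor, whose methods all reject as not implemented.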
  describe('not configured', function () {
    it('should return abstract persistor', async function () {
      const Settings = {
        docstore: {
          backend: undefined,
          bucket: 'wombat',
        },
      }
      const PersistorManager = SandboxedModule.require(modulePath, {
        requires: {
          '@overleaf/settings': Settings,
          '@overleaf/object-persistor': () => new FakePersistor(),
          '@overleaf/metrics': {},
        },
      })

      expect(PersistorManager.constructor.name).to.equal('AbstractPersistor')
      await expect(PersistorManager.sendStream()).to.eventually.be.rejectedWith(
        /method not implemented in persistor/
      )
    })
  })
})
253
services/docstore/test/unit/js/RangeManagerTests.js
Normal file
253
services/docstore/test/unit/js/RangeManagerTests.js
Normal file
@@ -0,0 +1,253 @@
/* eslint-disable
    no-return-assign,
    no-unused-vars,
*/
// TODO: This file was created by bulk-decaffeinate.
// Fix any style issues and re-enable lint.
/*
 * decaffeinate suggestions:
 * DS102: Remove unnecessary code created because of implicit returns
 * Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
 */
const SandboxedModule = require('sandboxed-module')
const sinon = require('sinon')
const { assert, expect } = require('chai')
const modulePath = require('node:path').join(
  __dirname,
  '../../../app/js/RangeManager'
)
const { ObjectId } = require('mongodb-legacy')

describe('RangeManager', function () {
  beforeEach(function () {
    return (this.RangeManager = SandboxedModule.require(modulePath, {
      requires: {
        './mongodb': {
          ObjectId,
        },
      },
    }))
  })
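
  // jsonRangesToMongo rehydrates JSON-serialised ranges: string ids become
  // ObjectIds and ISO timestamps become Dates, while malformed ids are left
  // untouched.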
  describe('jsonRangesToMongo', function () {
    it('should convert ObjectIds and dates to proper objects', function () {
      const changeId = new ObjectId().toString()
      const commentId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      return this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: commentId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: new ObjectId(changeId),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(userId),
              ts: new Date(ts),
            },
          },
        ],
        comments: [
          {
            id: new ObjectId(commentId),
            op: { c: 'foo', p: 3, t: new ObjectId(threadId) },
          },
        ],
      })
    })

    it('should leave malformed ObjectIds as they are', function () {
      const changeId = 'foo'
      const commentId = 'bar'
      const userId = 'baz'
      return this.RangeManager.jsonRangesToMongo({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      }).should.deep.equal({
        changes: [
          {
            id: changeId,
            metadata: {
              user_id: userId,
            },
          },
        ],
        comments: [
          {
            id: commentId,
          },
        ],
      })
    })

    return it('should be consistent when transformed through json -> mongo -> json', function () {
      const changeId = new ObjectId().toString()
      const commentId = new ObjectId().toString()
      const userId = new ObjectId().toString()
      const threadId = new ObjectId().toString()
      const ts = new Date().toJSON()
      const ranges1 = {
        changes: [
          {
            id: changeId,
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: userId,
              ts,
            },
          },
        ],
        comments: [
          {
            id: commentId,
            op: { c: 'foo', p: 3, t: threadId },
          },
        ],
      }
      const ranges1Copy = JSON.parse(JSON.stringify(ranges1)) // jsonRangesToMongo modifies in place
      const ranges2 = JSON.parse(
        JSON.stringify(this.RangeManager.jsonRangesToMongo(ranges1Copy))
      )
      return ranges1.should.deep.equal(ranges2)
    })
  })
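
  // shouldUpdateRanges compares the old and new ranges by value; a change to
  // any id, op, or metadata field should flag the ranges as needing an update.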
  return describe('shouldUpdateRanges', function () {
    beforeEach(function () {
      this.ranges = {
        changes: [
          {
            id: new ObjectId(),
            op: { i: 'foo', p: 3 },
            metadata: {
              user_id: new ObjectId(),
              ts: new Date(),
            },
          },
        ],
        comments: [
          {
            id: new ObjectId(),
            op: { c: 'foo', p: 3, t: new ObjectId() },
          },
        ],
      }
      return (this.ranges_copy = this.RangeManager.jsonRangesToMongo(
        JSON.parse(JSON.stringify(this.ranges))
      ))
    })

    describe('with a blank new range', function () {
      return it('should throw an error', function () {
        return expect(() => {
          return this.RangeManager.shouldUpdateRanges(this.ranges, null)
        }).to.throw(Error)
      })
    })

    describe('with a blank old range', function () {
      return it('should treat it like {}', function () {
        this.RangeManager.shouldUpdateRanges(null, {}).should.equal(false)
        return this.RangeManager.shouldUpdateRanges(
          null,
          this.ranges
        ).should.equal(true)
      })
    })

    describe('with no changes', function () {
      return it('should return false', function () {
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(false)
      })
    })

    return describe('with changes', function () {
      it('should return true when the change id changes', function () {
        this.ranges_copy.changes[0].id = new ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change user id changes', function () {
        this.ranges_copy.changes[0].metadata.user_id = new ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change ts changes', function () {
        this.ranges_copy.changes[0].metadata.ts = new Date(Date.now() + 1000)
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the change op changes', function () {
        this.ranges_copy.changes[0].op.i = 'bar'
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment id changes', function () {
        this.ranges_copy.comments[0].id = new ObjectId()
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      it('should return true when the comment offset changes', function () {
        this.ranges_copy.comments[0].op.p = 17
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })

      return it('should return true when the comment content changes', function () {
        this.ranges_copy.comments[0].op.c = 'bar'
        return this.RangeManager.shouldUpdateRanges(
          this.ranges,
          this.ranges_copy
        ).should.equal(true)
      })
    })
  })
})
12
services/docstore/tsconfig.json
Normal file
12
services/docstore/tsconfig.json
Normal file
@@ -0,0 +1,12 @@
{
  "extends": "../../tsconfig.backend.json",
  "include": [
    "app.js",
    "app/js/**/*",
    "benchmarks/**/*",
    "config/**/*",
    "scripts/**/*",
    "test/**/*",
    "types"
  ]
}